Merge branch 'master' into documentOperation

commit df7a9322e9
@@ -4,6 +4,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 import java.util.List;
 
+import org.hl7.fhir.dstu3.model.IdType;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 
 import ca.uhn.fhir.model.dstu2.resource.Patient;
@@ -140,5 +141,21 @@ public class AuthorizationInterceptors {
       }
    };
    //END SNIPPET: authorizeTenantAction
+
+
+   //START SNIPPET: patchAll
+   new AuthorizationInterceptor(PolicyEnum.DENY) {
+      @Override
+      public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
+         return new RuleBuilder()
+            // Authorize patch requests
+            .allow().patch().allRequests().andThen()
+            // Authorize actual writes that patch may perform
+            .allow().write().allResources().inCompartment("Patient", new IdType("Patient/123")).andThen()
+            .build();
+      }
+   };
+   //END SNIPPET: patchAll
+
 }
 }
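Note: the patchAll snippet above only takes effect once the interceptor is registered with a server. A minimal registration sketch, assuming a standard RestfulServer setup (the setup class is illustrative, not part of this commit):

import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;

public class ServerSetup {
   public void configure(RestfulServer theServer, AuthorizationInterceptor theAuthInterceptor) {
      // Once registered, every incoming request (including PATCH) is checked
      // against the rule list returned by buildRuleList()
      theServer.registerInterceptor(theAuthInterceptor);
   }
}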
@@ -156,7 +156,12 @@ public enum EncodingEnum {
    * </p>
    */
   public static EncodingEnum forContentType(String theContentType) {
-     return ourContentTypeToEncoding.get(theContentType);
+     String contentTypeSplitted = getTypeWithoutCharset(theContentType);
+     if (contentTypeSplitted == null) {
+        return null;
+     } else {
+        return ourContentTypeToEncoding.get(contentTypeSplitted);
+     }
   }
 
 
@@ -170,14 +175,33 @@ public enum EncodingEnum {
    * @see #forContentType(String)
    */
   public static EncodingEnum forContentTypeStrict(String theContentType) {
-     return ourContentTypeToEncodingStrict.get(theContentType);
+     String contentTypeSplitted = getTypeWithoutCharset(theContentType);
+     if (contentTypeSplitted == null) {
+        return null;
+     } else {
+        return ourContentTypeToEncodingStrict.get(contentTypeSplitted);
+     }
+  }
+
+  private static String getTypeWithoutCharset(String theContentType) {
+     if (theContentType == null) {
+        return null;
+     } else {
+        String[] contentTypeSplitted = theContentType.split(";");
+        return contentTypeSplitted[0];
+     }
   }
 
   /**
    * Is the given type a FHIR legacy (pre-DSTU3) content type?
    */
-  public static boolean isLegacy(String theFormat) {
-     return ourContentTypeToEncodingLegacy.containsKey(theFormat);
+  public static boolean isLegacy(String theContentType) {
+     String contentTypeSplitted = getTypeWithoutCharset(theContentType);
+     if (contentTypeSplitted == null) {
+        return false;
+     } else {
+        return ourContentTypeToEncodingLegacy.containsKey(contentTypeSplitted);
+     }
   }
 
 
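Note: the effect of the new getTypeWithoutCharset() helper is that content types carrying a charset parameter now resolve in the lookup maps. A small self-contained illustration (the header value is an example, not from the commit):

public class ContentTypeDemo {
   public static void main(String[] args) {
      // Previously the raw header string was used as the map key, so
      // "application/fhir+json; charset=utf-8" found no entry and the
      // encoding came back null. Splitting on ';' keeps only the MIME type:
      String header = "application/fhir+json; charset=utf-8";
      String mimeOnly = header.split(";")[0];
      System.out.println(mimeOnly); // prints "application/fhir+json"
   }
}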
@@ -28,9 +28,6 @@ import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.hl7.fhir.dstu3.model.Parameters;
-import org.hl7.fhir.dstu3.model.StringType;
-import org.hl7.fhir.dstu3.model.UriType;
 import org.hl7.fhir.instance.model.api.IBaseParameters;
 
 import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -82,10 +79,17 @@ public class UploadTerminologyCommand extends BaseCommand {
       IGenericClient client = super.newClient(theCommandLine);
       IBaseParameters inputParameters;
       if (ctx.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
-        Parameters p = new Parameters();
-        p.addParameter().setName("url").setValue(new UriType(termUrl));
+        org.hl7.fhir.dstu3.model.Parameters p = new org.hl7.fhir.dstu3.model.Parameters();
+        p.addParameter().setName("url").setValue(new org.hl7.fhir.dstu3.model.UriType(termUrl));
         for (String next : datafile) {
-           p.addParameter().setName("localfile").setValue(new StringType(next));
+           p.addParameter().setName("localfile").setValue(new org.hl7.fhir.dstu3.model.StringType(next));
+        }
+        inputParameters = p;
+     } else if (ctx.getVersion().getVersion() == FhirVersionEnum.R4) {
+        org.hl7.fhir.r4.model.Parameters p = new org.hl7.fhir.r4.model.Parameters();
+        p.addParameter().setName("url").setValue(new org.hl7.fhir.r4.model.UriType(termUrl));
+        for (String next : datafile) {
+           p.addParameter().setName("localfile").setValue(new org.hl7.fhir.r4.model.StringType(next));
         }
         inputParameters = p;
      } else {
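Note: the DSTU3 model imports are dropped in the previous hunk because this method now instantiates both DSTU3 and R4 model classes, and Java cannot import two classes with the same simple name, so both are spelled fully qualified. A compact sketch of the constraint (variable names illustrative):

class VersionedParametersSketch {
   // import org.hl7.fhir.dstu3.model.Parameters;  // these two imports would
   // import org.hl7.fhir.r4.model.Parameters;     // clash: same simple name
   void build() {
      org.hl7.fhir.dstu3.model.Parameters dstu3Params = new org.hl7.fhir.dstu3.model.Parameters();
      org.hl7.fhir.r4.model.Parameters r4Params = new org.hl7.fhir.r4.model.Parameters();
   }
}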
@@ -502,6 +502,7 @@
       <groupId>com.github.ben-manes.caffeine</groupId>
       <artifactId>caffeine</artifactId>
    </dependency>
+
    <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava-testlib</artifactId>
@@ -8,10 +8,10 @@ import ca.uhn.fhir.jpa.util.ResourceCountCache;
 import ca.uhn.fhir.model.dstu2.composite.MetaDt;
 import ca.uhn.fhir.validation.IValidatorModule;
 import org.apache.commons.lang3.time.DateUtils;
+import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
 import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
 import org.hl7.fhir.instance.hapi.validation.FhirInstanceValidator;
 import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
-import org.hl7.fhir.instance.utils.IResourceValidator.BestPracticeWarningLevel;
 import org.hl7.fhir.r4.utils.IResourceValidator;
 import org.springframework.beans.factory.annotation.Autowire;
 import org.springframework.context.annotation.Bean;
@@ -81,7 +81,7 @@ public class BaseDstu2Config extends BaseConfig {
    public IValidatorModule instanceValidatorDstu2() {
       FhirInstanceValidator retVal = new FhirInstanceValidator();
       retVal.setBestPracticeWarningLevel(IResourceValidator.BestPracticeWarningLevel.Warning);
-      retVal.setValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2()));
+      retVal.setValidationSupport(new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), jpaValidationSupportDstu2())));
       return retVal;
    }
 
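Note: the recurring change in these config classes (here and in the DSTU3/R4 hunks below) wraps the existing validation-support chain in a CachingValidationSupport decorator, so repeated profile and terminology look-ups are served from memory instead of re-walking the chain. The shape of the change in isolation, as it would sit inside instanceValidatorDstu2() (class names from the diff; the local variable is illustrative):

// Before: every validation walked the chain directly.
// After: the chain is wrapped once and look-ups are memoized.
CachingValidationSupport cached = new CachingValidationSupport(
   new ValidationSupportChain(
      new DefaultProfileValidationSupport(),
      jpaValidationSupportDstu2()));
retVal.setValidationSupport(cached);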
@@ -91,6 +91,13 @@ public class BaseDstu2Config extends BaseConfig {
       return retVal;
    }
 
+   @Bean(name = "myResourceCountsCache")
+   public ResourceCountCache resourceCountsCache() {
+      ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
+      retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
+      return retVal;
+   }
+
    @Bean(autowire = Autowire.BY_TYPE)
    public IFulltextSearchSvc searchDao() {
       FulltextSearchSvcImpl searchDao = new FulltextSearchSvcImpl();
@@ -121,13 +128,6 @@ public class BaseDstu2Config extends BaseConfig {
       return retVal;
    }
 
-   @Bean(name = "myResourceCountsCache")
-   public ResourceCountCache resourceCountsCache() {
-      ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu2().getResourceCounts());
-      retVal.setCacheMillis(60 * DateUtils.MILLIS_PER_SECOND);
-      return retVal;
-   }
-
    @Bean(autowire = Autowire.BY_TYPE)
    public IHapiTerminologySvc terminologyService() {
       return new HapiTerminologySvcDstu2();
@@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.validation.JpaValidationSupportChainDstu3;
 import ca.uhn.fhir.validation.IValidatorModule;
 import org.apache.commons.lang3.time.DateUtils;
 import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
+import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
 import org.hl7.fhir.dstu3.hapi.validation.FhirInstanceValidator;
 import org.hl7.fhir.r4.utils.IResourceValidator;
 import org.springframework.beans.factory.annotation.Autowire;
|
@ -78,13 +79,17 @@ public class BaseDstu3Config extends BaseConfig {
|
||||||
return val;
|
return val;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Bean
|
||||||
|
public JpaValidationSupportChainDstu3 jpaValidationSupportChain() {
|
||||||
|
return new JpaValidationSupportChainDstu3();
|
||||||
|
}
|
||||||
|
|
||||||
@Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
|
@Bean(name = "myJpaValidationSupportDstu3", autowire = Autowire.BY_NAME)
|
||||||
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
|
public ca.uhn.fhir.jpa.dao.dstu3.IJpaValidationSupportDstu3 jpaValidationSupportDstu3() {
|
||||||
ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
|
ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3 retVal = new ca.uhn.fhir.jpa.dao.dstu3.JpaValidationSupportDstu3();
|
||||||
return retVal;
|
return retVal;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@Bean(name = "myResourceCountsCache")
|
@Bean(name = "myResourceCountsCache")
|
||||||
public ResourceCountCache resourceCountsCache() {
|
public ResourceCountCache resourceCountsCache() {
|
||||||
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());
|
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());
|
||||||
|
@@ -142,7 +147,7 @@ public class BaseDstu3Config extends BaseConfig {
    @Primary
    @Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainDstu3")
    public IValidationSupport validationSupportChainDstu3() {
-      return new JpaValidationSupportChainDstu3();
+      return new CachingValidationSupport(jpaValidationSupportChain());
    }
 
 }
@@ -21,6 +21,7 @@ import ca.uhn.fhir.validation.IValidatorModule;
 import org.apache.commons.lang3.time.DateUtils;
 import org.hl7.fhir.r4.hapi.ctx.IValidationSupport;
 import org.hl7.fhir.r4.hapi.rest.server.GraphQLProvider;
+import org.hl7.fhir.r4.hapi.validation.CachingValidationSupport;
 import org.hl7.fhir.r4.hapi.validation.FhirInstanceValidator;
 import org.hl7.fhir.r4.utils.GraphQLEngine;
 import org.hl7.fhir.r4.utils.IResourceValidator.BestPracticeWarningLevel;
@@ -93,6 +94,11 @@ public class BaseR4Config extends BaseConfig {
       return val;
    }
 
+   @Bean
+   public JpaValidationSupportChainR4 jpaValidationSupportChain() {
+      return new JpaValidationSupportChainR4();
+   }
+
    @Bean(name = "myJpaValidationSupportR4", autowire = Autowire.BY_NAME)
    public ca.uhn.fhir.jpa.dao.r4.IJpaValidationSupportR4 jpaValidationSupportR4() {
       ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4 retVal = new ca.uhn.fhir.jpa.dao.r4.JpaValidationSupportR4();
@@ -156,7 +162,7 @@ public class BaseR4Config extends BaseConfig {
    @Primary
    @Bean(autowire = Autowire.BY_NAME, name = "myJpaValidationSupportChainR4")
    public IValidationSupport validationSupportChainR4() {
-      return new JpaValidationSupportChainR4();
+      return new CachingValidationSupport(jpaValidationSupportChain());
    }
 
 }
@@ -1,25 +1,5 @@
 package ca.uhn.fhir.jpa.dao;
 
-/*
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2018 University Health Network
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
 import ca.uhn.fhir.context.*;
 import ca.uhn.fhir.jpa.dao.data.*;
 import ca.uhn.fhir.jpa.entity.*;
@@ -58,7 +38,6 @@ import ca.uhn.fhir.util.*;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.hash.HashFunction;
 import com.google.common.hash.Hashing;
@@ -104,6 +83,26 @@ import java.util.stream.Collectors;
 
 import static org.apache.commons.lang3.StringUtils.*;
 
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 @SuppressWarnings("WeakerAccess")
 @Repository
 public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao, ApplicationContextAware {
@@ -186,6 +185,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
    protected IResourceTableDao myResourceTableDao;
    @Autowired
    protected IResourceTagDao myResourceTagDao;
+   @Autowired
+   protected IResourceSearchViewDao myResourceViewDao;
    @Autowired(required = true)
    private DaoConfig myConfig;
    private FhirContext myContext;
@@ -199,8 +200,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
    private ISearchParamPresenceSvc mySearchParamPresenceSvc;
    @Autowired
    private ISearchParamRegistry mySearchParamRegistry;
-   @Autowired
-   private ISearchResultDao mySearchResultDao;
+   //@Autowired
+   //private ISearchResultDao mySearchResultDao;
    @Autowired
    private IResourceIndexedCompositeStringUniqueDao myResourceIndexedCompositeStringUniqueDao;
    private ApplicationContext myApplicationContext;
@@ -227,6 +228,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
    }
 
    protected ExpungeOutcome doExpunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions) {
+      TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
+
       if (!getConfig().isExpungeEnabled()) {
          throw new MethodNotAllowedException("$expunge is not enabled on this server");
@@ -245,32 +247,39 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       /*
        * Delete historical versions of deleted resources
        */
-      Pageable page = new PageRequest(0, remainingCount.get());
-      Slice<Long> resourceIds;
-      if (theResourceId != null) {
-         resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
-      } else {
-         if (theResourceName != null) {
-            resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
-         } else {
-            resourceIds = myResourceTableDao.findIdsOfDeletedResources(page);
-         }
-      }
-      for (Long next : resourceIds) {
-         expungeHistoricalVersionsOfId(next, remainingCount);
-         if (remainingCount.get() <= 0) {
-            return toExpungeOutcome(theExpungeOptions, remainingCount);
-         }
-      }
+      Pageable page = PageRequest.of(0, remainingCount.get());
+      Slice<Long> resourceIds = txTemplate.execute(t -> {
+         if (theResourceId != null) {
+            return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
+         } else {
+            if (theResourceName != null) {
+               return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
+            } else {
+               return myResourceTableDao.findIdsOfDeletedResources(page);
+            }
+         }
+      });
+      for (Long next : resourceIds) {
+         txTemplate.execute(t -> {
+            expungeHistoricalVersionsOfId(next, remainingCount);
+            if (remainingCount.get() <= 0) {
+               return toExpungeOutcome(theExpungeOptions, remainingCount);
+            }
+            return null;
+         });
+      }
 
       /*
        * Delete current versions of deleted resources
        */
-      for (Long next : resourceIds) {
-         expungeCurrentVersionOfResource(next);
-         if (remainingCount.get() <= 0) {
-            return toExpungeOutcome(theExpungeOptions, remainingCount);
-         }
-      }
+      for (Long next : resourceIds) {
+         txTemplate.execute(t -> {
+            expungeCurrentVersionOfResource(next);
+            if (remainingCount.get() <= 0) {
+               return toExpungeOutcome(theExpungeOptions, remainingCount);
+            }
+            return null;
+         });
+      }
 
    }
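Note: the rework above replaces one long-running expunge transaction with a short transaction per step, driven by Spring's TransactionTemplate (the PageRequest.of change tracks the Spring Data 2.x API, which deprecated the PageRequest constructor). The pattern in isolation, as a sketch assuming a configured PlatformTransactionManager:

import java.util.List;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

class PerItemTransactionSketch {
   private final TransactionTemplate myTxTemplate;

   PerItemTransactionSketch(PlatformTransactionManager theTxManager) {
      myTxTemplate = new TransactionTemplate(theTxManager);
   }

   void process(List<Long> theIds) {
      for (Long next : theIds) {
         // Each iteration commits independently, so a huge expunge no
         // longer has to fit into a single database transaction.
         myTxTemplate.execute(t -> {
            deleteOneVersion(next); // hypothetical per-item work
            return null;
         });
      }
   }

   private void deleteOneVersion(Long theId) {
      // placeholder for the actual delete logic
   }
}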
@@ -280,22 +289,26 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       /*
        * Delete historical versions of non-deleted resources
        */
-      Pageable page = new PageRequest(0, remainingCount.get());
-      Slice<Long> historicalIds;
-      if (theResourceId != null && theVersion != null) {
-         historicalIds = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
-      } else {
-         if (theResourceName != null) {
-            historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
-         } else {
-            historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
-         }
-      }
-      for (Long next : historicalIds) {
-         expungeHistoricalVersion(next);
-         if (remainingCount.decrementAndGet() <= 0) {
-            return toExpungeOutcome(theExpungeOptions, remainingCount);
-         }
-      }
+      Pageable page = PageRequest.of(0, remainingCount.get());
+      Slice<Long> historicalIds = txTemplate.execute(t -> {
+         if (theResourceId != null && theVersion != null) {
+            return toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
+         } else {
+            if (theResourceName != null) {
+               return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
+            } else {
+               return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
+            }
+         }
+      });
+      for (Long next : historicalIds) {
+         txTemplate.execute(t -> {
+            expungeHistoricalVersion(next);
+            if (remainingCount.decrementAndGet() <= 0) {
+               return toExpungeOutcome(theExpungeOptions, remainingCount);
+            }
+            return null;
+         });
+      }
 
    }
@@ -315,7 +328,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       });
       txTemplate.execute(t -> {
          doExpungeEverythingQuery("DELETE from " + SearchParamPresent.class.getSimpleName() + " d");
-         doExpungeEverythingQuery("DELETE from " + SearchParam.class.getSimpleName() + " d");
          doExpungeEverythingQuery("DELETE from " + ForcedId.class.getSimpleName() + " d");
         doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamDate.class.getSimpleName() + " d");
         doExpungeEverythingQuery("DELETE from " + ResourceIndexedSearchParamNumber.class.getSimpleName() + " d");
@@ -704,58 +716,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       return retVal;
    }
 
-
-   @SuppressWarnings("unchecked")
-   public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
-      Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
-      IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
-      return dao;
-   }
-
-   protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
-      IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
-      if (retVal == null) {
-         List<String> supportedResourceTypes = getDaos()
-            .keySet()
-            .stream()
-            .map(t->myContext.getResourceDefinition(t).getName())
-            .sorted()
-            .collect(Collectors.toList());
-         throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
-      }
-      return retVal;
-   }
-
-
-   private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
-      if (myResourceTypeToDao == null) {
-         Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
-
-         Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
-
-         String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
-
-         for (IFhirResourceDao<?> next : daos.values()) {
-            resourceTypeToDao.put(next.getResourceType(), next);
-         }
-
-         if (this instanceof IFhirResourceDao<?>) {
-            IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
-            resourceTypeToDao.put(thiz.getResourceType(), thiz);
-         }
-
-         myResourceTypeToDao = resourceTypeToDao;
-      }
-
-      return Collections.unmodifiableMap(myResourceTypeToDao);
-   }
-
-   @PostConstruct
-   public void startClearCaches() {
-      myResourceTypeToDao = null;
-   }
-
-
    protected Set<ResourceIndexedSearchParamCoords> extractSearchParamCoords(ResourceTable theEntity, IBaseResource theResource) {
       return mySearchParamExtractor.extractSearchParamCoords(theEntity, theResource);
    }
@@ -910,7 +870,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
             param = new ResourceIndexedSearchParamQuantity();
             break;
          case STRING:
-            param = new ResourceIndexedSearchParamString();
+            param = new ResourceIndexedSearchParamString()
+               .setDaoConfig(myConfig);
             break;
          case TOKEN:
            param = new ResourceIndexedSearchParamToken();
@@ -957,18 +918,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       return myConfig;
    }
 
-   @Override
-   public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
-      /*
-       * We do a null check here because Smile's module system tries to
-       * initialize the application context twice if two modules depend on
-       * the persistence module. The second time sets the dependency's appctx.
-       */
-      if (myApplicationContext == null) {
-         myApplicationContext = theApplicationContext;
-      }
-   }
-
    public void setConfig(DaoConfig theConfig) {
       myConfig = theConfig;
    }
@@ -995,6 +944,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       }
    }
 
+   @SuppressWarnings("unchecked")
+   public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
+      Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
+      IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
+      return dao;
+   }
+
+   protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
+      IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
+      if (retVal == null) {
+         List<String> supportedResourceTypes = getDaos()
+            .keySet()
+            .stream()
+            .map(t -> myContext.getResourceDefinition(t).getName())
+            .sorted()
+            .collect(Collectors.toList());
+         throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
+      }
+      return retVal;
+   }
+
+   private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
+      if (myResourceTypeToDao == null) {
+         Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
+
+         Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
+
+         String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
+
+         for (IFhirResourceDao<?> next : daos.values()) {
+            resourceTypeToDao.put(next.getResourceType(), next);
+         }
+
+         if (this instanceof IFhirResourceDao<?>) {
+            IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
+            resourceTypeToDao.put(thiz.getResourceType(), thiz);
+         }
+
+         myResourceTypeToDao = resourceTypeToDao;
+      }
+
+      return Collections.unmodifiableMap(myResourceTypeToDao);
+   }
+
    public IResourceIndexedCompositeStringUniqueDao getResourceIndexedCompositeStringUniqueDao() {
       return myResourceIndexedCompositeStringUniqueDao;
    }
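Note: getDaos(), relocated here from earlier in the file, lazily builds a resource-type-to-DAO map from the Spring context on first use and then serves a read-only view; the @PostConstruct startClearCaches() hook added later in this diff resets the field so the map is rebuilt after a context refresh. The core pattern, reduced to a sketch with placeholder types:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

class LazyDaoMapSketch {
   private Map<Class<?>, Object> myCache; // built once, then reused

   Map<Class<?>, Object> getCache() {
      if (myCache == null) {
         Map<Class<?>, Object> cache = new HashMap<>();
         // ... populate from the ApplicationContext, as getDaos() does ...
         myCache = cache; // publish only the fully-built map
      }
      // callers can read but never mutate the cached map
      return Collections.unmodifiableMap(myCache);
   }
}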
@@ -1172,9 +1165,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 
    @Override
    public SearchBuilder newSearchBuilder() {
-      SearchBuilder builder = new SearchBuilder(getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
-         myForcedIdDao,
-         myTerminologySvc, mySerarchParamRegistry);
+      SearchBuilder builder = new SearchBuilder(
+         getContext(), myEntityManager, myFulltextSearchSvc, this, myResourceIndexedSearchParamUriDao,
+         myForcedIdDao, myTerminologySvc, mySerarchParamRegistry, myResourceTagDao, myResourceViewDao);
       return builder;
    }
 
@@ -1223,7 +1216,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       }
    }
 
-   private void populateResourceIdFromEntity(BaseHasResource theEntity, final IBaseResource theResource) {
+   private void populateResourceIdFromEntity(IBaseResourceEntity theEntity, final IBaseResource theResource) {
       IIdType id = theEntity.getIdDt();
       if (getContext().getVersion().getVersion().isRi()) {
          id = getContext().getVersion().newIdType().setValue(id.getValue());
@@ -1355,7 +1348,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
    }
 
    @SuppressWarnings("unchecked")
-   private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation, IResource res) {
+   private <R extends IBaseResource> R populateResourceMetadataHapi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IResource res) {
       R retVal = (R) res;
       if (theEntity.getDeleted() != null) {
          res = (IResource) myContext.getResourceDefinition(theResourceType).newInstance();
@@ -1384,7 +1377,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       ResourceMetadataKeyEnum.UPDATED.put(res, theEntity.getUpdated());
       IDao.RESOURCE_PID.put(res, theEntity.getId());
 
-      Collection<? extends BaseTag> tags = theEntity.getTags();
+      Collection<? extends BaseTag> tags = theTagList;
       if (theEntity.isHasTags()) {
          TagList tagList = new TagList();
         List<IBaseCoding> securityLabels = new ArrayList<>();
@@ -1421,7 +1414,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
    }
 
    @SuppressWarnings("unchecked")
-   private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation, IAnyResource res) {
+   private <R extends IBaseResource> R populateResourceMetadataRi(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<? extends BaseTag> theTagList, boolean theForHistoryOperation, IAnyResource res) {
       R retVal = (R) res;
       if (theEntity.getDeleted() != null) {
          res = (IAnyResource) myContext.getResourceDefinition(theResourceType).newInstance();
@@ -1454,7 +1447,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       res.getMeta().setLastUpdated(theEntity.getUpdatedDate());
       IDao.RESOURCE_PID.put(res, theEntity.getId());
 
-      Collection<? extends BaseTag> tags = theEntity.getTags();
+      Collection<? extends BaseTag> tags = theTagList;
 
       if (theEntity.isHasTags()) {
          for (BaseTag next : tags) {
@@ -1490,6 +1483,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       // nothing
    }
 
+   /**
+    * Subclasses may override to provide behaviour. Called when a resource has been deleted in the database
+    *
+    * @param theEntity The resource
+    */
+   protected void postDelete(ResourceTable theEntity) {
+      // nothing
+   }
+
    /**
    * Subclasses may override to provide behaviour. Called when a pre-existing resource has been updated in the database
    *
@@ -1536,6 +1538,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       return retVal;
    }
 
+   @Override
+   public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
+      /*
+       * We do a null check here because Smile's module system tries to
+       * initialize the application context twice if two modules depend on
+       * the persistence module. The second time sets the dependency's appctx.
+       */
+      if (myApplicationContext == null) {
+         myApplicationContext = theApplicationContext;
+      }
+   }
+
    private void setUpdatedTime(Collection<? extends BaseResourceIndexedSearchParam> theParams, Date theUpdateTime) {
       for (BaseResourceIndexedSearchParam nextSearchParam : theParams) {
          nextSearchParam.setUpdated(theUpdateTime);
@@ -1592,6 +1606,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       return false;
    }
 
+   @PostConstruct
+   public void startClearCaches() {
+      myResourceTypeToDao = null;
+   }
+
    private ExpungeOutcome toExpungeOutcome(ExpungeOptions theExpungeOptions, AtomicInteger theRemainingCount) {
       return new ExpungeOutcome()
          .setDeletedCount(theExpungeOptions.getLimit() - theRemainingCount.get());
@@ -1601,31 +1620,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
    public IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation) {
       RuntimeResourceDefinition type = myContext.getResourceDefinition(theEntity.getResourceType());
       Class<? extends IBaseResource> resourceType = type.getImplementingClass();
-      return toResource(resourceType, theEntity, theForHistoryOperation);
+      return toResource(resourceType, theEntity, null, theForHistoryOperation);
    }
 
    @SuppressWarnings("unchecked")
    @Override
-   public <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity,
-      boolean theForHistoryOperation) {
-      ResourceHistoryTable history;
+   public <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation) {
+      // 1. get the resource, its encoding and the tags if any
+      byte[] resourceBytes = null;
+      ResourceEncodingEnum resourceEncoding = null;
+      Collection<? extends BaseTag> myTagList = null;
+
       if (theEntity instanceof ResourceHistoryTable) {
-         history = (ResourceHistoryTable) theEntity;
+         ResourceHistoryTable history = (ResourceHistoryTable) theEntity;
+         resourceBytes = history.getResource();
+         resourceEncoding = history.getEncoding();
+         myTagList = history.getTags();
+      } else if (theEntity instanceof ResourceTable) {
+         ResourceTable resource = (ResourceTable) theEntity;
+         ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
+         if (history == null) {
+            return null;
+         }
+         resourceBytes = history.getResource();
+         resourceEncoding = history.getEncoding();
+         myTagList = resource.getTags();
+      } else if (theEntity instanceof ResourceSearchView) {
+         // This is the search View
+         ResourceSearchView myView = (ResourceSearchView) theEntity;
+         resourceBytes = myView.getResource();
+         resourceEncoding = myView.getEncoding();
+         if (theTagList == null)
+            myTagList = new HashSet<>();
+         else
+            myTagList = theTagList;
       } else {
-         history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
-      }
-
-      if (history == null) {
+         // something wrong
         return null;
      }
 
-      byte[] resourceBytes = history.getResource();
-      ResourceEncodingEnum resourceEncoding = history.getEncoding();
-
+      // 2. get the text
      String resourceText = null;
      switch (resourceEncoding) {
         case JSON:
            try {
               resourceText = new String(resourceBytes, "UTF-8");
            } catch (UnsupportedEncodingException e) {
@@ -1638,13 +1676,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
          case DEL:
             break;
       }
 
-      /*
-       * Use the appropriate custom type if one is specified in the context
-       */
+      // 3. Use the appropriate custom type if one is specified in the context
       Class<R> resourceType = theResourceType;
       if (myContext.hasDefaultTypeForProfile()) {
-         for (BaseTag nextTag : theEntity.getTags()) {
+         for (BaseTag nextTag : myTagList) {
            if (nextTag.getTag().getTagType() == TagTypeEnum.PROFILE) {
               String profile = nextTag.getTag().getCode();
               if (isNotBlank(profile)) {
@@ -1659,6 +1695,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
          }
       }
 
+      // 4. parse the text to FHIR
       R retVal;
       if (resourceEncoding != ResourceEncodingEnum.DEL) {
          IParser parser = resourceEncoding.newParser(getContext(theEntity.getFhirVersion()));
@@ -1689,15 +1726,15 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 
       }
 
+      // 5. fill MetaData
       if (retVal instanceof IResource) {
          IResource res = (IResource) retVal;
-         retVal = populateResourceMetadataHapi(resourceType, theEntity, theForHistoryOperation, res);
+         retVal = populateResourceMetadataHapi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
       } else {
          IAnyResource res = (IAnyResource) retVal;
-         retVal = populateResourceMetadataRi(resourceType, theEntity, theForHistoryOperation, res);
+         retVal = populateResourceMetadataRi(resourceType, theEntity, myTagList, theForHistoryOperation, res);
       }
 
-
       return retVal;
    }
 
@@ -1735,8 +1772,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
    protected ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, ResourceTable
       theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
      boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
+      Validate.notNull(theEntity);
+      Validate.isTrue(theDeletedTimestampOrNull != null || theResource != null, "Must have either a resource[{}] or a deleted timestamp[{}] for resource PID[{}]", theDeletedTimestampOrNull != null, theResource != null, theEntity.getId());
+
       ourLog.debug("Starting entity update");
 
+
       /*
        * This should be the very first thing..
        */
@@ -1826,6 +1867,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
          theEntity.setNarrativeTextParsedIntoWords(null);
          theEntity.setContentTextParsedIntoWords(null);
          theEntity.setHashSha256(null);
+         theEntity.setIndexStatus(INDEX_STATUS_INDEXED);
         changed = populateResourceIntoEntity(theRequest, theResource, theEntity, true);
 
      } else {
@@ -2003,6 +2045,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 
          postPersist(theEntity, (T) theResource);
 
+      } else if (theEntity.getDeleted() != null) {
+
+         postDelete(theEntity);
+
       } else {
 
          theEntity = myEntityManager.merge(theEntity);
 
@@ -2057,6 +2103,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
       if (thePerformIndexing) {
 
          for (ResourceIndexedSearchParamString next : removeCommon(existingStringParams, stringParams)) {
+            next.setDaoConfig(myConfig);
            myEntityManager.remove(next);
            theEntity.getParamsString().remove(next);
         }
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
@ -50,6 +50,7 @@ import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
|
||||||
import ca.uhn.fhir.util.*;
|
import ca.uhn.fhir.util.*;
|
||||||
import org.apache.commons.lang3.Validate;
|
import org.apache.commons.lang3.Validate;
|
||||||
import org.hl7.fhir.instance.model.api.*;
|
import org.hl7.fhir.instance.model.api.*;
|
||||||
|
import org.hl7.fhir.r4.model.InstantType;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
import org.springframework.beans.factory.annotation.Required;
|
import org.springframework.beans.factory.annotation.Required;
|
||||||
import org.springframework.lang.NonNull;
|
import org.springframework.lang.NonNull;
|
||||||
|
@@ -207,7 +208,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 
       StopWatch w = new StopWatch();
 
-      T resourceToDelete = toResource(myResourceType, entity, false);
+      T resourceToDelete = toResource(myResourceType, entity, null, false);
 
       // Notify IServerOperationInterceptors about pre-action call
       if (theReques != null) {
@@ -289,7 +290,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
       ResourceTable entity = myEntityManager.find(ResourceTable.class, pid);
       deletedResources.add(entity);
 
-      T resourceToDelete = toResource(myResourceType, entity, false);
+      T resourceToDelete = toResource(myResourceType, entity, null, false);
 
       // Notify IServerOperationInterceptors about pre-action call
       if (theRequest != null) {
@@ -517,6 +518,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
    }
 
    @Override
+   @Transactional(propagation = Propagation.NEVER)
    public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions) {
       BaseHasResource entity = readEntity(theId);
       if (theId.hasVersionIdPart()) {
@@ -532,6 +534,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
    }
 
    @Override
+   @Transactional(propagation = Propagation.NEVER)
    public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions) {
       ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
 
@@ -854,16 +857,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
       BaseHasResource entity = readEntity(theId);
       validateResourceType(entity);
 
-      T retVal = toResource(myResourceType, entity, false);
+      T retVal = toResource(myResourceType, entity, null, false);
 
-      IPrimitiveType<Date> deleted;
-      if (retVal instanceof IResource) {
-         deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) retVal);
-      } else {
-         deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) retVal);
-      }
-      if (deleted != null && !deleted.isEmpty()) {
-         throw new ResourceGoneException("Resource was deleted at " + deleted.getValueAsString());
+      if (entity.getDeleted() != null) {
+         throw new ResourceGoneException("Resource was deleted at " + new InstantType(entity.getDeleted()).getValueAsString());
       }
 
       ourLog.debug("Processed read on {} in {}ms", theId.getValue(), w.getMillisAndRestart());
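Note: the simplified check reads the deletion timestamp straight off the entity row instead of round-tripping through parsed resource metadata; a client reading a deleted resource should still see the same failure. A sketch from the caller's side (client wiring and import paths assumed for the HAPI version in use):

import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;

class ReadDeletedSketch {
   void demo(IGenericClient client) {
      try {
         client.read().resource("Patient").withId("123").execute();
      } catch (ResourceGoneException e) {
         // HTTP 410 Gone; the message carries the deletion instant taken
         // directly from the entity row ("Resource was deleted at ...")
      }
   }
}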
@@ -930,10 +927,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 
    @Override
    public void reindex(T theResource, ResourceTable theEntity) {
-      ourLog.debug("Indexing resource {} - PID {}", theResource.getIdElement().getValue(), theEntity.getId());
-      CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
-      updateEntity(null, theResource, theEntity, null, true, false, theEntity.getUpdatedDate(), true, false);
-      CURRENTLY_REINDEXING.put(theResource, null);
+      ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getId());
+      if (theResource != null) {
+         CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
+      }
+      updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, theEntity.getUpdatedDate(), true, false);
+      if (theResource != null) {
+         CURRENTLY_REINDEXING.put(theResource, null);
+      }
    }
 
    @Override
@@ -1065,6 +1066,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
       mySecondaryPrimaryKeyParamName = theSecondaryPrimaryKeyParamName;
    }
 
+   @PostConstruct
+   public void start() {
+      ourLog.debug("Starting resource DAO for type: {}", getResourceName());
+   }
+
    protected <MT extends IBaseMetaType> MT toMetaDt(Class<MT> theType, Collection<TagDefinition> tagDefinitions) {
       MT retVal;
       try {
@ -1336,9 +1342,4 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@PostConstruct
|
|
||||||
public void start() {
|
|
||||||
ourLog.debug("Starting resource DAO for type: {}", getResourceName());
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
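Note: the new @Transactional(propagation = Propagation.NEVER) on expunge() reflects that expunge manages its own transaction boundaries internally rather than running inside one long caller transaction. A minimal caller sketch, assuming the ExpungeOptions accessors of this era (myPatientDao and the target ID are hypothetical):

   ExpungeOptions options = new ExpungeOptions();
   options.setExpungeDeletedResources(true); // physically remove deleted resources
   options.setExpungeOldVersions(true);      // drop non-current history versions
   ExpungeOutcome outcome = myPatientDao.expunge(new IdType("Patient/2"), options);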
@@ -305,7 +305,8 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
 
       final IBaseResource resource = toResource(resourceTable, false);
 
-      @SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resource.getClass());
+      Class<? extends IBaseResource> resourceClass = getContext().getResourceDefinition(resourceTable.getResourceType()).getImplementingClass();
+      @SuppressWarnings("rawtypes") final IFhirResourceDao dao = getDaoOrThrowException(resourceClass);
       dao.reindex(resource, resourceTable);
       return null;
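Note: since reindex(T, ResourceTable) above now tolerates a null resource (e.g. an unparseable row), the DAO can no longer safely be derived from resource.getClass(); it has to come from the entity's stored type name, which is what the replacement lines do. A sketch of the resolution step:

   // resource may be null after this change, so the DAO lookup no longer relies on it:
   String typeName = resourceTable.getResourceType(); // e.g. "Patient"
   Class<? extends IBaseResource> resourceClass =
         getContext().getResourceDefinition(typeName).getImplementingClass();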
@@ -20,45 +20,43 @@ package ca.uhn.fhir.jpa.dao;
  * #L%
  */
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.regex.Pattern;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.util.FhirTerser;
+import com.google.common.annotations.VisibleForTesting;
 
 import org.apache.commons.lang3.ObjectUtils;
 import org.hl7.fhir.instance.model.api.IBaseDatatype;
 import org.hl7.fhir.instance.model.api.IBaseExtension;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import com.google.common.annotations.VisibleForTesting;
-
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.context.RuntimeResourceDefinition;
-import ca.uhn.fhir.context.RuntimeSearchParam;
-import ca.uhn.fhir.util.FhirTerser;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.regex.Pattern;
 
 public abstract class BaseSearchParamExtractor implements ISearchParamExtractor {
 
-   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
-   public static final Pattern SPLIT = Pattern.compile("\\||( or )");
-
+   public static final Pattern SPLIT = Pattern.compile("\\||( or )");
+   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseSearchParamExtractor.class);
    @Autowired
    private FhirContext myContext;
+   @Autowired
+   private DaoConfig myDaoConfig;
    @Autowired
    private ISearchParamRegistry mySearchParamRegistry;
 
    public BaseSearchParamExtractor() {
       super();
    }
 
-   public BaseSearchParamExtractor(FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
+   public BaseSearchParamExtractor(DaoConfig theDaoConfig, FhirContext theCtx, ISearchParamRegistry theSearchParamRegistry) {
       myContext = theCtx;
       mySearchParamRegistry = theSearchParamRegistry;
+      myDaoConfig = theDaoConfig;
    }
 
    @Override
    public List<PathAndRef> extractResourceLinks(IBaseResource theResource, RuntimeSearchParam theNextSpDef) {
       List<PathAndRef> refs = new ArrayList<PathAndRef>();
@@ -95,20 +93,24 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
            }
         } catch (Exception e) {
            RuntimeResourceDefinition def = myContext.getResourceDefinition(theResource);
-           ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] { nextPathTrimmed, def.getName(), e.toString(), e } );
+           ourLog.warn("Failed to index values from path[{}] in resource type[{}]: {}", new Object[] {nextPathTrimmed, def.getName(), e.toString(), e});
         }
      }
      return values;
   }
 
   protected FhirContext getContext() {
      return myContext;
   }
 
+  public DaoConfig getDaoConfig() {
+     return myDaoConfig;
+  }
+
   public Collection<RuntimeSearchParam> getSearchParams(IBaseResource theResource) {
      RuntimeResourceDefinition def = getContext().getResourceDefinition(theResource);
      Collection<RuntimeSearchParam> retVal = mySearchParamRegistry.getActiveSearchParams(def.getName()).values();
-     List<RuntimeSearchParam> defaultList= Collections.emptyList();
+     List<RuntimeSearchParam> defaultList = Collections.emptyList();
      retVal = ObjectUtils.defaultIfNull(retVal, defaultList);
      return retVal;
   }
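Note: every concrete extractor now has to thread the DaoConfig through its constructor. A hypothetical subclass wiring (the class name and body here are illustrative only):

   abstract class MySearchParamExtractor extends BaseSearchParamExtractor {
      MySearchParamExtractor(DaoConfig theDaoConfig, FhirContext theCtx, ISearchParamRegistry theRegistry) {
         super(theDaoConfig, theCtx, theRegistry);
      }
   }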
@@ -89,7 +89,7 @@ public class DaoConfig {
    /**
     * update setter javadoc if default changes
     */
-   private boolean myAllowContainsSearches = true;
+   private boolean myAllowContainsSearches = false;
 
    /**
     * update setter javadoc if default changes
@@ -754,7 +754,15 @@ public class DaoConfig {
    * If enabled, the server will support the use of :contains searches,
    * which are helpful but can have adverse effects on performance.
    * <p>
-   * Default is <code>true</code>
+   * Default is <code>false</code> (Note that prior to HAPI FHIR
+   * 3.5.0 the default was <code>true</code>)
+   * </p>
+   * <p>
+   * Note: If you change this value after data has already been
+   * stored in the database, you must force a reindexing of all
+   * data in the database, or resources may not be searchable.
+   * </p>
    */
   public boolean isAllowContainsSearches() {
      return myAllowContainsSearches;
@@ -764,12 +772,21 @@ public class DaoConfig {
    * If enabled, the server will support the use of :contains searches,
    * which are helpful but can have adverse effects on performance.
    * <p>
-   * Default is <code>true</code>
+   * Default is <code>false</code> (Note that prior to HAPI FHIR
+   * 3.5.0 the default was <code>true</code>)
+   * </p>
+   * <p>
+   * Note: If you change this value after data has already been
+   * stored in the database, you must force a reindexing of all
+   * data in the database, or resources may not be searchable.
+   * </p>
    */
   public void setAllowContainsSearches(boolean theAllowContainsSearches) {
      this.myAllowContainsSearches = theAllowContainsSearches;
   }
 
 
   /**
    * If set to <code>true</code> (default is <code>false</code>) the server will allow
    * resources to have references to external servers. For example if this server is
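Note: deployments that rely on :contains must now opt back in, and per the corrected javadoc above a full reindex is required if data was stored before the flag is flipped. Minimal sketch:

   DaoConfig config = new DaoConfig();
   config.setAllowContainsSearches(true); // restores the pre-3.5.0 default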
@@ -28,6 +28,7 @@ import java.util.*;
 import javax.annotation.PostConstruct;
 
 import org.apache.commons.codec.binary.StringUtils;
+import org.hl7.fhir.instance.hapi.validation.CachingValidationSupport;
 import org.hl7.fhir.instance.hapi.validation.DefaultProfileValidationSupport;
 import org.hl7.fhir.instance.hapi.validation.ValidationSupportChain;
 import org.hl7.fhir.instance.model.api.IIdType;
@@ -62,7 +63,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
    @Qualifier("myFhirContextDstu2Hl7Org")
    private FhirContext myRiCtx;
 
-   private ValidationSupportChain myValidationSupport;
+   private CachingValidationSupport myValidationSupport;
 
    private void addCompose(String theFilter, ValueSet theValueSetToPopulate, ValueSet theSourceValueSet, CodeSystemConcept theConcept) {
       if (isBlank(theFilter)) {
@@ -252,7 +253,7 @@ public class FhirResourceDaoValueSetDstu2 extends FhirResourceDaoDstu2<ValueSet>
    public void postConstruct() {
       super.postConstruct();
       myDefaultProfileValidationSupport = new DefaultProfileValidationSupport();
-      myValidationSupport = new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport);
+      myValidationSupport = new CachingValidationSupport(new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport));
    }
 
    @Override
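Note: CachingValidationSupport is a decorator; it exposes the same validation-support contract as the chain it wraps but memoizes expensive lookups such as profile and ValueSet resolution. The wrapping pattern, sketched with assumed variable names:

   IValidationSupport chain = new ValidationSupportChain(defaultSupport, jpaSupport);
   IValidationSupport cached = new CachingValidationSupport(chain); // same contract, cached results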
@@ -1,15 +1,18 @@
 package ca.uhn.fhir.jpa.dao;
 
+import java.util.Collection;
+import java.util.Set;
+
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.jpa.entity.BaseHasResource;
+import ca.uhn.fhir.jpa.entity.IBaseResourceEntity;
 import ca.uhn.fhir.jpa.entity.ResourceTable;
+import ca.uhn.fhir.jpa.entity.ResourceTag;
 import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
-import org.hl7.fhir.instance.model.api.IBaseResource;
-
-import java.util.Collection;
-import java.util.Set;
 
 /*
  * #%L
@@ -56,6 +59,6 @@ public interface IDao {
 
    IBaseResource toResource(BaseHasResource theEntity, boolean theForHistoryOperation);
 
-   <R extends IBaseResource> R toResource(Class<R> theResourceType, BaseHasResource theEntity, boolean theForHistoryOperation);
+   <R extends IBaseResource> R toResource(Class<R> theResourceType, IBaseResourceEntity theEntity, Collection<ResourceTag> theTagList, boolean theForHistoryOperation);
 
 }
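Note: the widened toResource() lets callers hand in tags they have already batch-loaded; as the BaseHapiFhirResourceDao hunk earlier shows, call sites without a preloaded collection simply pass null. Sketch:

   // with tags preloaded in bulk (see doLoadPids below):
   IBaseResource r1 = theDao.toResource(type, entity, tagMap.get(pid), false);
   // without: null is passed and the DAO resolves tags itself (behaviour assumed from the call sites above):
   IBaseResource r2 = theDao.toResource(type, entity, null, false);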
@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao;
 import ca.uhn.fhir.context.*;
 import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
 import ca.uhn.fhir.jpa.entity.*;
 import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
 import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
@@ -53,7 +55,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.apache.commons.lang3.ObjectUtils;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -61,6 +62,8 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.hibernate.ScrollMode;
 import org.hibernate.ScrollableResults;
 import org.hibernate.query.Query;
+import org.hibernate.query.criteria.internal.CriteriaBuilderImpl;
+import org.hibernate.query.criteria.internal.predicate.BooleanStaticAssertionPredicate;
 import org.hl7.fhir.dstu3.model.BaseResource;
 import org.hl7.fhir.instance.model.api.IAnyResource;
 import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -69,7 +72,6 @@ import org.hl7.fhir.instance.model.api.IIdType;
 import javax.persistence.EntityManager;
 import javax.persistence.TypedQuery;
 import javax.persistence.criteria.*;
-import javax.persistence.criteria.CriteriaBuilder.In;
 import java.math.BigDecimal;
 import java.math.MathContext;
 import java.util.*;
@@ -108,12 +110,17 @@ public class SearchBuilder implements ISearchBuilder {
    private IHapiTerminologySvc myTerminologySvc;
    private int myFetchSize;
 
+   protected IResourceTagDao myResourceTagDao;
+   protected IResourceSearchViewDao myResourceSearchViewDao;
+
    /**
    * Constructor
    */
-   public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager, IFulltextSearchSvc theFulltextSearchSvc,
-         BaseHapiFhirDao<?> theDao,
-         IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao, IHapiTerminologySvc theTerminologySvc, ISearchParamRegistry theSearchParamRegistry) {
+   public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager,
+         IFulltextSearchSvc theFulltextSearchSvc, BaseHapiFhirDao<?> theDao,
+         IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao, IForcedIdDao theForcedIdDao,
+         IHapiTerminologySvc theTerminologySvc, ISearchParamRegistry theSearchParamRegistry,
+         IResourceTagDao theResourceTagDao, IResourceSearchViewDao theResourceViewDao) {
       myContext = theFhirContext;
       myEntityManager = theEntityManager;
       myFulltextSearchSvc = theFulltextSearchSvc;
@@ -122,6 +129,8 @@ public class SearchBuilder implements ISearchBuilder {
       myForcedIdDao = theForcedIdDao;
       myTerminologySvc = theTerminologySvc;
       mySearchParamRegistry = theSearchParamRegistry;
+      myResourceTagDao = theResourceTagDao;
+      myResourceSearchViewDao = theResourceViewDao;
    }
 
    private void addPredicateComposite(String theResourceName, RuntimeSearchParam theParamDef, List<? extends IQueryParameterType> theNextAnd) {
@@ -257,7 +266,7 @@ public class SearchBuilder implements ISearchBuilder {
          return;
       }
 
-      List<Predicate> codePredicates = new ArrayList<Predicate>();
+      List<Predicate> codePredicates = new ArrayList<>();
       for (IQueryParameterType nextOr : theList) {
          IQueryParameterType params = nextOr;
 
@@ -273,8 +282,9 @@ public class SearchBuilder implements ISearchBuilder {
            ParamPrefixEnum prefix = ObjectUtils.defaultIfNull(param.getPrefix(), ParamPrefixEnum.EQUAL);
            String invalidMessageName = "invalidNumberPrefix";
 
-           Predicate num = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
-           codePredicates.add(num);
+           Predicate predicateNumeric = createPredicateNumeric(theResourceName, theParamName, join, myBuilder, params, prefix, value, fromObj, invalidMessageName);
+           Predicate predicateOuter = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, predicateNumeric);
+           codePredicates.add(predicateOuter);
 
         } else {
            throw new IllegalArgumentException("Invalid token type: " + params.getClass());
@@ -287,11 +297,10 @@ public class SearchBuilder implements ISearchBuilder {
 
    private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing) {
       Join<ResourceTable, SearchParamPresent> paramPresentJoin = myResourceTableRoot.join("mySearchParamPresents", JoinType.LEFT);
-      Join<SearchParamPresent, SearchParam> paramJoin = paramPresentJoin.join("mySearchParam", JoinType.LEFT);
 
-      myPredicates.add(myBuilder.equal(paramJoin.get("myResourceName"), theResourceName));
-      myPredicates.add(myBuilder.equal(paramJoin.get("myParamName"), theParamName));
-      myPredicates.add(myBuilder.equal(paramPresentJoin.get("myPresent"), !theMissing));
+      Expression<Long> hashPresence = paramPresentJoin.get("myHashPresence").as(Long.class);
+      Long hash = SearchParamPresent.calculateHashPresence(theResourceName, theParamName, !theMissing);
+      myPredicates.add(myBuilder.equal(hashPresence, hash));
    }
 
    private void addPredicateParamMissing(String theResourceName, String theParamName, boolean theMissing, Join<ResourceTable, ? extends BaseResourceIndexedSearchParam> theJoin) {
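Note: the myHashPresence rewrite is the recurring theme of this commit: several string-equality predicates collapse into one equality against a precomputed hash column, which is cheaper to index and compare. Roughly what such a hash could look like; the authoritative logic lives in SearchParamPresent.calculateHashPresence, and the Guava murmur3 choice below is an assumption:

   import static java.nio.charset.StandardCharsets.UTF_8;
   import com.google.common.hash.Hashing;

   long hash = Hashing.murmur3_128().newHasher()
         .putString("Patient", UTF_8)    // resource name (example value)
         .putString("identifier", UTF_8) // param name (example value)
         .putBoolean(true)               // present, as opposed to missing
         .hash().asLong();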
@@ -309,7 +318,7 @@ public class SearchBuilder implements ISearchBuilder {
         return;
      }
 
-     List<Predicate> codePredicates = new ArrayList<Predicate>();
+     List<Predicate> codePredicates = new ArrayList<>();
      for (IQueryParameterType nextOr : theList) {
 
        Predicate singleCode = createPredicateQuantity(nextOr, theResourceName, theParamName, myBuilder, join);
@@ -332,7 +341,7 @@ public class SearchBuilder implements ISearchBuilder {
 
      Join<ResourceTable, ResourceLink> join = createOrReuseJoin(JoinEnum.REFERENCE, theParamName);
 
-     List<Predicate> codePredicates = new ArrayList<Predicate>();
+     List<Predicate> codePredicates = new ArrayList<>();
 
      for (IQueryParameterType nextOr : theList) {
 
@@ -429,7 +438,7 @@ public class SearchBuilder implements ISearchBuilder {
 
        } else {
           RuntimeResourceDefinition resDef = myContext.getResourceDefinition(ref.getResourceType());
-          resourceTypes = new ArrayList<Class<? extends IBaseResource>>(1);
+          resourceTypes = new ArrayList<>(1);
           resourceTypes.add(resDef.getImplementingClass());
           resourceId = ref.getIdPart();
        }
@@ -474,7 +483,7 @@ public class SearchBuilder implements ISearchBuilder {
           IQueryParameterType chainValue;
           if (remainingChain != null) {
              if (param == null || param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
-                ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", new Object[] {nextType.getSimpleName(), chain, remainingChain});
+                ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", nextType.getSimpleName(), chain, remainingChain);
                continue;
              }
 
@@ -495,7 +504,7 @@ public class SearchBuilder implements ISearchBuilder {
           Root<ResourceTable> subQfrom = subQ.from(ResourceTable.class);
           subQ.select(subQfrom.get("myId").as(Long.class));
 
-          List<List<? extends IQueryParameterType>> andOrParams = new ArrayList<List<? extends IQueryParameterType>>();
+          List<List<? extends IQueryParameterType>> andOrParams = new ArrayList<>();
           andOrParams.add(Collections.singletonList(chainValue));
 
           /*
@@ -546,7 +555,7 @@ public class SearchBuilder implements ISearchBuilder {
 
    private void addPredicateResourceId(List<List<? extends IQueryParameterType>> theValues) {
       for (List<? extends IQueryParameterType> nextValue : theValues) {
-         Set<Long> orPids = new HashSet<Long>();
+         Set<Long> orPids = new HashSet<>();
         for (IQueryParameterType next : nextValue) {
            String value = next.getValueAsQueryToken(myContext);
            if (value != null && value.startsWith("|")) {
@@ -594,10 +603,9 @@ public class SearchBuilder implements ISearchBuilder {
         return;
      }
 
-     List<Predicate> codePredicates = new ArrayList<Predicate>();
+     List<Predicate> codePredicates = new ArrayList<>();
      for (IQueryParameterType nextOr : theList) {
-        IQueryParameterType theParameter = nextOr;
-        Predicate singleCode = createPredicateString(theParameter, theResourceName, theParamName, myBuilder, join);
+        Predicate singleCode = createPredicateString(nextOr, theResourceName, theParamName, myBuilder, join);
        codePredicates.add(singleCode);
      }
 
@@ -742,7 +750,7 @@ public class SearchBuilder implements ISearchBuilder {
         return;
      }
 
-     List<Predicate> codePredicates = new ArrayList<Predicate>();
+     List<Predicate> codePredicates = new ArrayList<>();
      for (IQueryParameterType nextOr : theList) {
 
        if (nextOr instanceof TokenParam) {
@@ -785,7 +793,6 @@ public class SearchBuilder implements ISearchBuilder {
           continue;
        }
 
-       Predicate predicate;
        if (param.getQualifier() == UriParamQualifierEnum.ABOVE) {
 
          /*
@@ -814,14 +821,24 @@ public class SearchBuilder implements ISearchBuilder {
            continue;
          }
 
-         predicate = join.get("myUri").as(String.class).in(toFind);
+         Predicate uriPredicate = join.get("myUri").as(String.class).in(toFind);
+         Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
+         codePredicates.add(hashAndUriPredicate);
+
        } else if (param.getQualifier() == UriParamQualifierEnum.BELOW) {
-         predicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
+
+         Predicate uriPredicate = myBuilder.like(join.get("myUri").as(String.class), createLeftMatchLikeExpression(value));
+         Predicate hashAndUriPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, uriPredicate);
+         codePredicates.add(hashAndUriPredicate);
+
        } else {
-         predicate = myBuilder.equal(join.get("myUri").as(String.class), value);
+
+         long hashUri = ResourceIndexedSearchParamUri.calculateHashUri(theResourceName, theParamName, value);
+         Predicate hashPredicate = myBuilder.equal(join.get("myHashUri"), hashUri);
+         codePredicates.add(hashPredicate);
+
        }
-       codePredicates.add(predicate);
      } else {
        throw new IllegalArgumentException("Invalid URI type: " + nextOr.getClass());
      }
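Note on the three URI branches: a precomputed hash only answers exact equality, so plain URI matches compare myHashUri directly, while :above and :below still have to inspect myUri itself and are therefore AND-ed with the parameter's identity hash. Sketched (cb and join are shorthand for the builder and join above):

   // exact: one indexed equality on the hash column
   cb.equal(join.get("myHashUri"), ResourceIndexedSearchParamUri.calculateHashUri(resName, paramName, value));
   // prefix (:below): LIKE on the raw column, scoped to this parameter via the identity hash
   cb.and(cb.equal(join.get("myHashIdentity"), BaseResourceIndexedSearchParam.calculateHashIdentity(resName, paramName)),
          cb.like(join.get("myUri").as(String.class), value + "%"));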
@@ -839,16 +856,13 @@ public class SearchBuilder implements ISearchBuilder {
      }
 
      Predicate orPredicate = myBuilder.or(toArray(codePredicates));
-     Predicate outerPredicate = combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, join, orPredicate);
-     myPredicates.add(outerPredicate);
+     myPredicates.add(orPredicate);
    }
 
    private Predicate combineParamIndexPredicateWithParamNamePredicate(String theResourceName, String theParamName, From<?, ? extends BaseResourceIndexedSearchParam> theFrom, Predicate thePredicate) {
-      Predicate resourceTypePredicate = myBuilder.equal(theFrom.get("myResourceType"), theResourceName);
-      Predicate paramNamePredicate = myBuilder.equal(theFrom.get("myParamName"), theParamName);
-      Predicate outerPredicate = myBuilder.and(resourceTypePredicate, paramNamePredicate, thePredicate);
-      return outerPredicate;
+      long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
+      Predicate hashIdentityPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
+      return myBuilder.and(hashIdentityPredicate, thePredicate);
    }
 
    private Predicate createCompositeParamPart(String theResourceName, Root<ResourceTable> theRoot, RuntimeSearchParam theParam, IQueryParameterType leftValue) {
@@ -1028,7 +1042,7 @@ public class SearchBuilder implements ISearchBuilder {
      if (theParamName == null) {
         return num;
      }
-     return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, num);
+     return num;
    }
 
    private Predicate createPredicateQuantity(IQueryParameterType theParam, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
@@ -1054,39 +1068,31 @@ public class SearchBuilder implements ISearchBuilder {
        throw new IllegalArgumentException("Invalid quantity type: " + theParam.getClass());
      }
 
-     Predicate system = null;
-     if (!isBlank(systemValue)) {
-        system = theBuilder.equal(theFrom.get("mySystem"), systemValue);
-     }
-
-     Predicate code = null;
-     if (!isBlank(unitsValue)) {
-        code = theBuilder.equal(theFrom.get("myUnits"), unitsValue);
+     Predicate hashPredicate;
+     if (!isBlank(systemValue) && !isBlank(unitsValue)) {
+        long hash = ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(theResourceName, theParamName, systemValue, unitsValue);
+        hashPredicate = myBuilder.equal(theFrom.get("myHashIdentitySystemAndUnits"), hash);
+     } else if (!isBlank(unitsValue)) {
+        long hash = ResourceIndexedSearchParamQuantity.calculateHashUnits(theResourceName, theParamName, unitsValue);
+        hashPredicate = myBuilder.equal(theFrom.get("myHashIdentityAndUnits"), hash);
+     } else {
+        long hash = BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName);
+        hashPredicate = myBuilder.equal(theFrom.get("myHashIdentity"), hash);
      }
 
      cmpValue = ObjectUtils.defaultIfNull(cmpValue, ParamPrefixEnum.EQUAL);
      final Expression<BigDecimal> path = theFrom.get("myValue");
      String invalidMessageName = "invalidQuantityPrefix";
 
-     Predicate num = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
-
-     Predicate singleCode;
-     if (system == null && code == null) {
-        singleCode = num;
-     } else if (system == null) {
-        singleCode = theBuilder.and(code, num);
-     } else if (code == null) {
-        singleCode = theBuilder.and(system, num);
-     } else {
-        singleCode = theBuilder.and(system, code, num);
-     }
-
-     return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
+     Predicate numericPredicate = createPredicateNumeric(theResourceName, null, theFrom, theBuilder, theParam, cmpValue, valueValue, path, invalidMessageName);
+
+     return theBuilder.and(hashPredicate, numericPredicate);
    }
 
    private Predicate createPredicateString(IQueryParameterType theParameter, String theResourceName, String theParamName, CriteriaBuilder theBuilder,
         From<?, ResourceIndexedSearchParamString> theFrom) {
      String rawSearchTerm;
+     DaoConfig daoConfig = myCallingDao.getConfig();
      if (theParameter instanceof TokenParam) {
        TokenParam id = (TokenParam) theParameter;
        if (!id.isText()) {
@@ -1097,7 +1103,7 @@ public class SearchBuilder implements ISearchBuilder {
      StringParam id = (StringParam) theParameter;
      rawSearchTerm = id.getValue();
      if (id.isContains()) {
-       if (!myCallingDao.getConfig().isAllowContainsSearches()) {
+       if (!daoConfig.isAllowContainsSearches()) {
          throw new MethodNotAllowedException(":contains modifier is disabled on this server");
        }
      }
@@ -1113,22 +1119,34 @@ public class SearchBuilder implements ISearchBuilder {
         + ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
      }
 
-     String likeExpression = BaseHapiFhirDao.normalizeString(rawSearchTerm);
-     if (theParameter instanceof StringParam &&
-        ((StringParam) theParameter).isContains() &&
-        myCallingDao.getConfig().isAllowContainsSearches()) {
-       likeExpression = createLeftAndRightMatchLikeExpression(likeExpression);
-     } else {
-       likeExpression = createLeftMatchLikeExpression(likeExpression);
-     }
-
-     Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
-     if (theParameter instanceof StringParam && ((StringParam) theParameter).isExact()) {
-       Predicate exactCode = theBuilder.equal(theFrom.get("myValueExact"), rawSearchTerm);
-       singleCode = theBuilder.and(singleCode, exactCode);
-     }
-
-     return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
+     boolean exactMatch = theParameter instanceof StringParam && ((StringParam) theParameter).isExact();
+     if (exactMatch) {
+
+       // Exact match
+       Long hash = ResourceIndexedSearchParamString.calculateHashExact(theResourceName, theParamName, rawSearchTerm);
+       return theBuilder.equal(theFrom.get("myHashExact").as(Long.class), hash);
+
+     } else {
+
+       // Normalized match
+       String normalizedString = BaseHapiFhirDao.normalizeString(rawSearchTerm);
+       String likeExpression;
+       if (theParameter instanceof StringParam &&
+          ((StringParam) theParameter).isContains() &&
+          daoConfig.isAllowContainsSearches()) {
+         likeExpression = createLeftAndRightMatchLikeExpression(normalizedString);
+       } else {
+         likeExpression = createLeftMatchLikeExpression(normalizedString);
+       }
+
+       Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(daoConfig, theResourceName, theParamName, normalizedString);
+       Predicate hashCode = theBuilder.equal(theFrom.get("myHashNormalizedPrefix").as(Long.class), hash);
+       Predicate singleCode = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
+       return theBuilder.and(hashCode, singleCode);
+
+     }
    }
 
    private List<Predicate> createPredicateTagList(Path<TagDefinition> theDefJoin, CriteriaBuilder theBuilder, TagTypeEnum theTagType, List<Pair<String, String>> theTokens) {
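Note: seen from the REST API, the two branches above are the plain and :exact string searches (:contains rides on the plain branch and additionally requires DaoConfig.setAllowContainsSearches(true)). A hypothetical fluent-client sketch, assuming an IGenericClient and endpoint:

   IGenericClient client = ctx.newRestfulGenericClient("http://example.com/fhir");
   Bundle prefixMatch = client.search().forResource(Patient.class)
         .where(Patient.NAME.matches().value("smi"))          // LIKE plus myHashNormalizedPrefix
         .returnBundle(Bundle.class).execute();
   Bundle exactMatch = client.search().forResource(Patient.class)
         .where(Patient.NAME.matchesExactly().value("Smith")) // single equality on myHashExact
         .returnBundle(Bundle.class).execute();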
@@ -1183,7 +1201,7 @@ public class SearchBuilder implements ISearchBuilder {
      * Process token modifiers (:in, :below, :above)
      */
 
-     List<VersionIndependentConcept> codes = null;
+     List<VersionIndependentConcept> codes;
      if (modifier == TokenParamModifier.IN) {
        codes = myTerminologySvc.expandValueSet(code);
      } else if (modifier == TokenParamModifier.ABOVE) {
@@ -1192,81 +1210,53 @@ public class SearchBuilder implements ISearchBuilder {
      } else if (modifier == TokenParamModifier.BELOW) {
        system = determineSystemIfMissing(theParamName, code, system);
        codes = myTerminologySvc.findCodesBelow(system, code);
-     }
-
-     ArrayList<Predicate> singleCodePredicates = new ArrayList<>();
-     if (codes != null) {
-
-       if (codes.isEmpty()) {
-
-         // This will never match anything
-         Predicate codePredicate = theBuilder.isNull(theFrom.get("myMissing"));
-         singleCodePredicates.add(codePredicate);
-
-       } else {
-         List<Predicate> orPredicates = new ArrayList<Predicate>();
-         Map<String, List<VersionIndependentConcept>> map = new HashMap<String, List<VersionIndependentConcept>>();
-         for (VersionIndependentConcept nextCode : codes) {
-           List<VersionIndependentConcept> systemCodes = map.get(nextCode.getSystem());
-           if (null == systemCodes) {
-             systemCodes = new ArrayList<>();
-             map.put(nextCode.getSystem(), systemCodes);
-           }
-           systemCodes.add(nextCode);
-         }
-         // Use "in" in case of large numbers of codes due to param modifiers
-         final Path<String> systemExpression = theFrom.get("mySystem");
-         final Path<String> valueExpression = theFrom.get("myValue");
-         for (Map.Entry<String, List<VersionIndependentConcept>> entry : map.entrySet()) {
-           Predicate systemPredicate = theBuilder.equal(systemExpression, entry.getKey());
-           In<String> codePredicate = theBuilder.in(valueExpression);
-           for (VersionIndependentConcept nextCode : entry.getValue()) {
-             codePredicate.value(nextCode.getCode());
-           }
-           orPredicates.add(theBuilder.and(systemPredicate, codePredicate));
-         }
-
-         singleCodePredicates.add(theBuilder.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
-       }
-
      } else {
-
-       /*
-        * Ok, this is a normal query
-        */
-
-       if (StringUtils.isNotBlank(system)) {
-         if (modifier != null && modifier == TokenParamModifier.NOT) {
-           singleCodePredicates.add(theBuilder.notEqual(theFrom.get("mySystem"), system));
-         } else {
-           singleCodePredicates.add(theBuilder.equal(theFrom.get("mySystem"), system));
-         }
-       } else if (system == null) {
-         // don't check the system
-       } else {
-         // If the system is "", we only match on null systems
-         singleCodePredicates.add(theBuilder.isNull(theFrom.get("mySystem")));
-       }
-
-       if (StringUtils.isNotBlank(code)) {
-         if (modifier != null && modifier == TokenParamModifier.NOT) {
-           singleCodePredicates.add(theBuilder.notEqual(theFrom.get("myValue"), code));
-         } else {
-           singleCodePredicates.add(theBuilder.equal(theFrom.get("myValue"), code));
-         }
-       } else {
-         /*
-          * As of HAPI FHIR 1.5, if the client searched for a token with a system but no specified value this means to
-          * match all tokens with the given value.
-          *
-          * I'm not sure I agree with this, but hey.. FHIR-I voted and this was the result :)
-          */
-         // singleCodePredicates.add(theBuilder.isNull(theFrom.get("myValue")));
-       }
-     }
+       codes = Collections.singletonList(new VersionIndependentConcept(system, code));
+     }
 
-     Predicate singleCode = theBuilder.and(toArray(singleCodePredicates));
-     return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, singleCode);
+     if (codes.isEmpty()) {
+       // This will never match anything
+       return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, false);
+     }
+
+     /*
+      * Note: A null system value means "match any system", but
+      * an empty-string system value means "match values that
+      * explicitly have no system".
+      */
+     boolean haveSystem = codes.get(0).getSystem() != null;
+     boolean haveCode = isNotBlank(codes.get(0).getCode());
+     Expression<Long> hashField;
+     if (!haveSystem && !haveCode) {
+       // If we have neither, this isn't actually an expression so
+       // just return 1=1
+       return new BooleanStaticAssertionPredicate((CriteriaBuilderImpl) theBuilder, true);
+     } else if (haveSystem && haveCode) {
+       hashField = theFrom.get("myHashSystemAndValue").as(Long.class);
+     } else if (haveSystem) {
+       hashField = theFrom.get("myHashSystem").as(Long.class);
+     } else {
+       hashField = theFrom.get("myHashValue").as(Long.class);
+     }
+
+     List<Long> values = new ArrayList<>(codes.size());
+     for (VersionIndependentConcept next : codes) {
+       if (haveSystem && haveCode) {
+         values.add(ResourceIndexedSearchParamToken.calculateHashSystemAndValue(theResourceName, theParamName, next.getSystem(), next.getCode()));
+       } else if (haveSystem) {
+         values.add(ResourceIndexedSearchParamToken.calculateHashSystem(theResourceName, theParamName, next.getSystem()));
+       } else {
+         values.add(ResourceIndexedSearchParamToken.calculateHashValue(theResourceName, theParamName, next.getCode()));
+       }
+     }
+
+     Predicate predicate = hashField.in(values);
+     if (modifier == TokenParamModifier.NOT) {
+       Predicate identityPredicate = theBuilder.equal(theFrom.get("myHashIdentity").as(Long.class), BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName));
+       Predicate disjunctionPredicate = theBuilder.not(predicate);
+       predicate = theBuilder.and(identityPredicate, disjunctionPredicate);
+     }
+     return predicate;
    }
 
    @Override
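Note: one subtlety in the rewritten token predicate: for the :not modifier, negating hashField.in(values) alone would also match index rows belonging to other search parameters, so the negation is AND-ed with this parameter's identity hash to stay scoped. The resulting shape:

   // rows for THIS parameter whose token hash is not in the excluded set:
   predicate = theBuilder.and(
         theBuilder.equal(theFrom.get("myHashIdentity").as(Long.class),
               BaseResourceIndexedSearchParam.calculateHashIdentity(theResourceName, theParamName)),
         theBuilder.not(hashField.in(values)));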
@@ -1371,8 +1361,8 @@ public class SearchBuilder implements ISearchBuilder {
      if (myParams.getEverythingMode() != null) {
        Join<ResourceTable, ResourceLink> join = myResourceTableRoot.join("myResourceLinks", JoinType.LEFT);
 
-       if (myParams.get(BaseResource.SP_RES_ID) != null) {
-         StringParam idParm = (StringParam) myParams.get(BaseResource.SP_RES_ID).get(0).get(0);
+       if (myParams.get(IAnyResource.SP_RES_ID) != null) {
+         StringParam idParm = (StringParam) myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
         Long pid = BaseHapiFhirDao.translateForcedIdToPid(myResourceName, idParm.getValue(), myForcedIdDao);
         if (myAlsoIncludePids == null) {
           myAlsoIncludePids = new ArrayList<>(1);
@@ -1462,7 +1452,7 @@ public class SearchBuilder implements ISearchBuilder {
        return false;
      }
 
-     if (BaseResource.SP_RES_ID.equals(theSort.getParamName())) {
+     if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {
        From<?, ?> forcedIdJoin = theFrom.join("myForcedId", JoinType.LEFT);
        if (theSort.getOrder() == null || theSort.getOrder() == SortOrderEnum.ASC) {
          theOrders.add(theBuilder.asc(forcedIdJoin.get("myForcedId")));
@@ -1602,35 +1592,39 @@ public class SearchBuilder implements ISearchBuilder {
 
    private void doLoadPids(List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation, EntityManager entityManager, FhirContext context, IDao theDao,
         Map<Long, Integer> position, Collection<Long> pids) {
-     CriteriaBuilder builder = entityManager.getCriteriaBuilder();
-     CriteriaQuery<ResourceTable> cq = builder.createQuery(ResourceTable.class);
-     Root<ResourceTable> from = cq.from(ResourceTable.class);
-     cq.where(from.get("myId").in(pids));
-     TypedQuery<ResourceTable> q = entityManager.createQuery(cq);
 
-     List<ResourceTable> resultList = q.getResultList();
+     // -- get the resource from the searchView
+     Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(pids);
 
-     for (ResourceTable next : resultList) {
+     //-- preload all tags with tag definition if any
+     Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
+
+     Long resourceId = null;
+     for (ResourceSearchView next : resourceSearchViewList) {
+
        Class<? extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
-       IBaseResource resource = theDao.toResource(resourceType, next, theForHistoryOperation);
+
+       resourceId = next.getId();
+
+       IBaseResource resource = theDao.toResource(resourceType, next, tagMap.get(resourceId), theForHistoryOperation);
        if (resource == null) {
          ourLog.warn("Unable to find resource {}/{}/_history/{} in database", next.getResourceType(), next.getIdDt().getIdPart(), next.getVersion());
          continue;
        }
-       Integer index = position.get(next.getId());
+       Integer index = position.get(resourceId);
        if (index == null) {
-         ourLog.warn("Got back unexpected resource PID {}", next.getId());
+         ourLog.warn("Got back unexpected resource PID {}", resourceId);
         continue;
        }
 
        if (resource instanceof IResource) {
-         if (theRevIncludedPids.contains(next.getId())) {
+         if (theRevIncludedPids.contains(resourceId)) {
           ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.INCLUDE);
         } else {
           ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.MATCH);
         }
        } else {
-         if (theRevIncludedPids.contains(next.getId())) {
+         if (theRevIncludedPids.contains(resourceId)) {
           ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.INCLUDE.getCode());
         } else {
           ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.MATCH.getCode());
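Note: the doLoadPids() rework replaces a per-row entity load, with tags lazily fetched per resource (an N+1 pattern), by two bulk queries. In outline:

   // 1) one query against the flattened search view:
   Collection<ResourceSearchView> rows = myResourceSearchViewDao.findByResourceIds(pids);
   // 2) one batched query for the tags of every row that declares any:
   Map<Long, Collection<ResourceTag>> tags = getResourceTagMap(rows);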
@@ -1641,6 +1635,44 @@ public class SearchBuilder implements ISearchBuilder {
      }
    }
 
+   private Map<Long, Collection<ResourceTag>> getResourceTagMap(Collection<ResourceSearchView> theResourceSearchViewList) {
+
+      List<Long> idList = new ArrayList<Long>(theResourceSearchViewList.size());
+
+      //-- find all resources that have tags
+      for (ResourceSearchView resource : theResourceSearchViewList) {
+         if (resource.isHasTags()) {
+            idList.add(resource.getId());
+         }
+      }
+
+      Map<Long, Collection<ResourceTag>> tagMap = new HashMap<>();
+
+      //-- no tags
+      if (idList.size() == 0) {
+         return tagMap;
+      }
+
+      //-- get all tags for the idList
+      Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);
+
+      //-- build the map, key = resourceId, value = list of ResourceTag
+      Long resourceId;
+      Collection<ResourceTag> tagCol;
+      for (ResourceTag tag : tagList) {
+         resourceId = tag.getResourceId();
+         tagCol = tagMap.get(resourceId);
+         if (tagCol == null) {
+            tagCol = new ArrayList<>();
+            tagCol.add(tag);
+            tagMap.put(resourceId, tagCol);
+         } else {
+            tagCol.add(tag);
+         }
+      }
+
+      return tagMap;
+   }
+
    @Override
    public void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation,
         EntityManager entityManager, FhirContext context, IDao theDao) {
@@ -1677,18 +1709,16 @@ public class SearchBuilder implements ISearchBuilder {
    }
 
    /**
-    * THIS SHOULD RETURN HASHSET and not jsut Set because we add to it later (so it can't be Collections.emptySet())
-    *
-    * @param theLastUpdated
+    * THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet())
     */
    @Override
    public HashSet<Long> loadReverseIncludes(IDao theCallingDao, FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes,
         boolean theReverseMode, DateRangeParam theLastUpdated) {
      if (theMatches.size() == 0) {
-       return new HashSet<Long>();
+       return new HashSet<>();
      }
      if (theRevIncludes == null || theRevIncludes.isEmpty()) {
-       return new HashSet<Long>();
+       return new HashSet<>();
      }
      String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid";
 
@@ -1729,7 +1759,7 @@ public class SearchBuilder implements ISearchBuilder {
      } else {
 
        List<String> paths;
-       RuntimeSearchParam param = null;
+       RuntimeSearchParam param;
        String resType = nextInclude.getParamType();
        if (isBlank(resType)) {
          continue;
@@ -59,7 +59,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
           searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
         }
 
-        ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
+        ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
         nextEntity.setResource(theEntity);
         retVal.add(nextEntity);
      }
@@ -68,7 +68,7 @@ public class SearchParamExtractorDstu2 extends BaseSearchParamExtractor implemen
        if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
          value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
        }
-       ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
+       ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
       nextEntity.setResource(theEntity);
       retVal.add(nextEntity);
      }
@@ -1,5 +1,6 @@
 package ca.uhn.fhir.jpa.dao.data;

+import java.util.Collection;
 import java.util.List;

 /*
@@ -38,5 +39,7 @@ public interface IForcedIdDao extends JpaRepository<ForcedId, Long> {

 	@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid = :resource_pid")
 	public ForcedId findByResourcePid(@Param("resource_pid") Long theResourcePid);

+	@Query("SELECT f FROM ForcedId f WHERE f.myResourcePid in (:pids)")
+	Collection<ForcedId> findByResourcePids(@Param("pids") Collection<Long> pids);
 }
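Note: the new findByResourcePids query turns the former one-query-per-resource forced-ID lookup into a single IN-clause fetch per page of results. A minimal caller sketch follows (the helper class, constructor injection, and the batch size of 500 are illustrative assumptions, not part of this changeset):

    import java.util.Collection;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ForcedIdLookup {

        private final IForcedIdDao myForcedIdDao; // the repository shown above

        public ForcedIdLookup(IForcedIdDao theForcedIdDao) {
            myForcedIdDao = theForcedIdDao;
        }

        // Resolve forced IDs for many resource pids with a bounded number of
        // IN-clause queries; 500 keeps the IN list well under typical DB limits.
        public Map<Long, ForcedId> loadForcedIds(List<Long> thePids) {
            Map<Long, ForcedId> retVal = new HashMap<>();
            for (int i = 0; i < thePids.size(); i += 500) {
                List<Long> batch = thePids.subList(i, Math.min(i + 500, thePids.size()));
                Collection<ForcedId> found = myForcedIdDao.findByResourcePids(batch);
                for (ForcedId next : found) {
                    retVal.put(next.getResourcePid(), next);
                }
            }
            return retVal;
        }
    }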
@@ -1,6 +1,10 @@
 package ca.uhn.fhir.jpa.dao.data;

-import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
+import java.util.Collection;
+import java.util.Date;

+import javax.persistence.TemporalType;

 import org.springframework.data.domain.Pageable;
 import org.springframework.data.domain.Slice;
 import org.springframework.data.jpa.repository.JpaRepository;
@@ -8,8 +12,7 @@ import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.jpa.repository.Temporal;
 import org.springframework.data.repository.query.Param;

-import javax.persistence.TemporalType;
-import java.util.Date;
+import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;

 /*
 * #%L
@@ -82,4 +85,10 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
 		"LEFT OUTER JOIN ResourceTable t ON (v.myResourceId = t.myId) " +
 		"WHERE v.myResourceVersion != t.myVersion")
 	Slice<Long> findIdsOfPreviousVersionsOfResources(Pageable thePage);

+	@Query("" +
+		"SELECT h FROM ResourceHistoryTable h " +
+		"INNER JOIN ResourceTable r ON (r.myId = h.myResourceId and r.myVersion = h.myResourceVersion) " +
+		"WHERE r.myId in (:pids)")
+	Collection<ResourceHistoryTable> findByResourceIds(@Param("pids") Collection<Long> pids);
 }
@@ -23,7 +23,12 @@ package ca.uhn.fhir.jpa.dao.data;
 import org.springframework.data.jpa.repository.JpaRepository;

 import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;

 public interface IResourceIndexedSearchParamStringDao extends JpaRepository<ResourceIndexedSearchParamString, Long> {
-	// nothing yet
+
+	@Query("select count(*) from ResourceIndexedSearchParamString t WHERE t.myResourcePid = :resid")
+	int countForResourceId(@Param("resid") Long theResourcePid);
+
 }
@@ -20,10 +20,14 @@ package ca.uhn.fhir.jpa.dao.data;
 * #L%
 */

-import org.springframework.data.jpa.repository.JpaRepository;
-
 import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;

 public interface IResourceIndexedSearchParamTokenDao extends JpaRepository<ResourceIndexedSearchParamToken, Long> {
-	// nothing yet
+
+	@Query("select count(*) from ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :resid")
+	int countForResourceId(@Param("resid") Long theResourcePid);
+
 }
@@ -1,5 +1,7 @@
 package ca.uhn.fhir.jpa.dao.data;

+import java.util.Collection;
+
 /*
 * #%L
 * HAPI FHIR JPA Server
@@ -10,7 +12,7 @@ package ca.uhn.fhir.jpa.dao.data;
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
@@ -24,11 +26,10 @@ import org.springframework.data.jpa.repository.JpaRepository;
 import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.repository.query.Param;

-import ca.uhn.fhir.jpa.entity.SearchParam;
+import ca.uhn.fhir.jpa.entity.ResourceSearchView;

-public interface ISearchParamDao extends JpaRepository<SearchParam, Long> {
+public interface IResourceSearchViewDao extends JpaRepository<ResourceSearchView, Long> {

-	@Query("SELECT s FROM SearchParam s WHERE s.myResourceName = :resname AND s.myParamName = :parmname")
-	public SearchParam findForResource(@Param("resname") String theResourceType, @Param("parmname") String theParamName);
-
+	@Query("SELECT v FROM ResourceSearchView v WHERE v.myResourceId in (:pids)")
+	Collection<ResourceSearchView> findByResourceIds(@Param("pids") Collection<Long> pids);
 }
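Note: replacing ISearchParamDao with IResourceSearchViewDao swaps a per-parameter lookup for a view that exposes one denormalized row per resource, fetchable in bulk by pid. A hedged usage sketch (the wrapper class is illustrative; getResourceId() is assumed to come from the IBaseResourceEntity interface introduced later in this change):

    import java.util.Collection;
    import java.util.HashMap;
    import java.util.Map;

    public class SearchViewLoader {

        private final IResourceSearchViewDao myResourceSearchViewDao; // dao shown above

        public SearchViewLoader(IResourceSearchViewDao theDao) {
            myResourceSearchViewDao = theDao;
        }

        // One query per page of matched pids, indexed by resource id for the
        // bundle-assembly step.
        public Map<Long, ResourceSearchView> loadViews(Collection<Long> thePids) {
            Map<Long, ResourceSearchView> byId = new HashMap<>();
            for (ResourceSearchView next : myResourceSearchViewDao.findByResourceIds(thePids)) {
                byId.put(next.getResourceId(), next);
            }
            return byId;
        }
    }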
@@ -1,5 +1,7 @@
 package ca.uhn.fhir.jpa.dao.data;

+import java.util.Collection;
+
 /*
 * #%L
 * HAPI FHIR JPA Server
@@ -21,9 +23,15 @@ package ca.uhn.fhir.jpa.dao.data;
 */

 import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;

 import ca.uhn.fhir.jpa.entity.ResourceTag;

 public interface IResourceTagDao extends JpaRepository<ResourceTag, Long> {
-	// nothing
+	@Query("" +
+		"SELECT t FROM ResourceTag t " +
+		"INNER JOIN TagDefinition td ON (td.myId = t.myTagId) " +
+		"WHERE t.myResourceId in (:pids)")
+	Collection<ResourceTag> findByResourceIds(@Param("pids") Collection<Long> pids);
 }
@@ -36,19 +36,19 @@ import ca.uhn.fhir.jpa.entity.Search;
 public interface ISearchDao extends JpaRepository<Search, Long> {

 	@Query("SELECT s FROM Search s WHERE s.myUuid = :uuid")
-	public Search findByUuid(@Param("uuid") String theUuid);
+	Search findByUuid(@Param("uuid") String theUuid);

 	@Query("SELECT s.myId FROM Search s WHERE s.mySearchLastReturned < :cutoff")
-	public Slice<Long> findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage);
+	Slice<Long> findWhereLastReturnedBefore(@Param("cutoff") Date theCutoff, Pageable thePage);

 	// @Query("SELECT s FROM Search s WHERE s.myCreated < :cutoff")
 	// public Collection<Search> findWhereCreatedBefore(@Param("cutoff") Date theCutoff);

 	@Query("SELECT s FROM Search s WHERE s.myResourceType = :type AND mySearchQueryStringHash = :hash AND s.myCreated > :cutoff")
-	public Collection<Search> find(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);
+	Collection<Search> find(@Param("type") String theResourceType, @Param("hash") int theHashCode, @Param("cutoff") Date theCreatedCutoff);

 	@Modifying
 	@Query("UPDATE Search s SET s.mySearchLastReturned = :last WHERE s.myId = :pid")
-	public void updateSearchLastReturned(@Param("pid") long thePid, @Param("last") Date theDate);
+	void updateSearchLastReturned(@Param("pid") long thePid, @Param("last") Date theDate);

 }
@@ -38,8 +38,8 @@ public interface ISearchResultDao extends JpaRepository<SearchResult, Long> {
 	@Query(value="SELECT r FROM SearchResult r WHERE r.mySearch = :search")
 	Collection<SearchResult> findWithSearchUuid(@Param("search") Search theSearch);

-	@Query(value="SELECT r FROM SearchResult r WHERE r.mySearch = :search ORDER BY r.myOrder ASC")
-	Page<SearchResult> findWithSearchUuid(@Param("search") Search theSearch, Pageable thePage);
+	@Query(value="SELECT r.myResourcePid FROM SearchResult r WHERE r.mySearch = :search ORDER BY r.myOrder ASC")
+	Page<Long> findWithSearchUuid(@Param("search") Search theSearch, Pageable thePage);

 	@Modifying
 	@Query(value="DELETE FROM SearchResult r WHERE r.mySearchPid = :search")
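Note: selecting r.myResourcePid instead of the whole row means paging a stored search now returns Page<Long>, so only the pid column is materialized and no SearchResult entities enter the persistence context. A caller sketch (the wrapper class and the Spring Data 1.x-style PageRequest constructor are assumptions):

    import java.util.List;

    import org.springframework.data.domain.Page;
    import org.springframework.data.domain.PageRequest;

    public class SearchResultPager {

        private final ISearchResultDao mySearchResultDao; // dao shown above

        public SearchResultPager(ISearchResultDao theDao) {
            mySearchResultDao = theDao;
        }

        // Only the pid column crosses the JPA boundary; the resources
        // themselves are loaded separately, by pid.
        public List<Long> loadPage(Search theSearch, int thePageIndex, int thePageSize) {
            Page<Long> pids = mySearchResultDao.findWithSearchUuid(theSearch, new PageRequest(thePageIndex, thePageSize));
            return pids.getContent();
        }
    }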
@@ -26,6 +26,8 @@ import org.springframework.data.repository.query.Param;

 import ca.uhn.fhir.jpa.entity.TermCodeSystem;

+import java.util.Optional;
+
 public interface ITermCodeSystemDao extends JpaRepository<TermCodeSystem, Long> {

 	@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myCodeSystemUri = :code_system_uri")
@@ -34,4 +36,7 @@ public interface ITermCodeSystemDao extends JpaRepository<TermCodeSystem, Long>
 	@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myResourcePid = :resource_pid")
 	TermCodeSystem findByResourcePid(@Param("resource_pid") Long theReourcePid);

+	@Query("SELECT cs FROM TermCodeSystem cs WHERE cs.myCurrentVersion.myId = :csv_pid")
+	Optional<TermCodeSystem> findWithCodeSystemVersionAsCurrentVersion(@Param("csv_pid") Long theCodeSystemVersionPid);
+
 }
@@ -1,9 +1,16 @@
 package ca.uhn.fhir.jpa.dao.data;

-import java.util.List;
+import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
+import ca.uhn.fhir.jpa.entity.TermConcept;
 import org.springframework.data.domain.Page;
 import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Modifying;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import java.util.List;

 /*
 * #%L
@@ -25,14 +32,6 @@ import org.springframework.data.domain.Pageable;
 * #L%
 */

-import org.springframework.data.jpa.repository.JpaRepository;
-import org.springframework.data.jpa.repository.Modifying;
-import org.springframework.data.jpa.repository.Query;
-import org.springframework.data.repository.query.Param;
-
-import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
-import ca.uhn.fhir.jpa.entity.TermConcept;
-
 public interface ITermConceptDao extends JpaRepository<TermConcept, Long> {

 	@Query("SELECT c FROM TermConcept c WHERE c.myCodeSystem = :code_system AND c.myCode = :code")
@@ -42,14 +41,13 @@ public interface ITermConceptDao extends JpaRepository<TermConcept, Long> {
 	List<TermConcept> findByCodeSystemVersion(@Param("code_system") TermCodeSystemVersion theCodeSystem);

 	@Query("SELECT t FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid")
-	@Modifying
-	List<TermConcept> findByCodeSystemVersion(@Param("cs_pid") Long thePid);
+	Slice<TermConcept> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
+
+	@Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null")
+	Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest);

 	@Query("UPDATE TermConcept t SET t.myIndexStatus = null")
 	@Modifying
 	int markAllForReindexing();

-	@Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null")
-	Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest);
-
 }
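Note: replacing the unbounded List fetch with a Slice keyed by a Pageable lets terminology maintenance walk one code system version's concepts in fixed-size chunks instead of loading them all at once. A sketch of the intended consumption pattern (the wrapper class, the loop shape, and the page size of 1000 are assumptions; foreign-key ordering concerns are ignored here):

    import org.springframework.data.domain.PageRequest;
    import org.springframework.data.domain.Slice;

    public class ConceptPurger {

        private final ITermConceptDao myTermConceptDao; // dao shown above

        public ConceptPurger(ITermConceptDao theDao) {
            myTermConceptDao = theDao;
        }

        // Always re-read page 0: each pass deletes the rows it just loaded,
        // so the next "first page" is a fresh batch.
        public void deleteConceptsOfVersion(Long theCodeSystemVersionPid) {
            while (true) {
                Slice<TermConcept> slice = myTermConceptDao.findByCodeSystemVersion(new PageRequest(0, 1000), theCodeSystemVersionPid);
                if (!slice.hasContent()) {
                    return;
                }
                for (TermConcept next : slice) {
                    myTermConceptDao.delete(next);
                }
            }
        }
    }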
@@ -1,7 +1,11 @@
 package ca.uhn.fhir.jpa.dao.data;

 import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
 import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;

 /*
 * #%L
@@ -24,5 +28,8 @@ import org.springframework.data.jpa.repository.JpaRepository;
 */

 public interface ITermConceptDesignationDao extends JpaRepository<TermConceptDesignation, Long> {
-	// nothing
+
+	@Query("SELECT t FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid")
+	Slice<TermConceptDesignation> findByCodeSystemVersion(Pageable thePage, @Param("csv_pid") Long thePid);
+
 }
@@ -1,5 +1,12 @@
 package ca.uhn.fhir.jpa.dao.data;

+import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
 import java.util.Collection;

 /*
@@ -22,20 +29,12 @@ import java.util.Collection;
 * #L%
 */

-import org.springframework.data.jpa.repository.JpaRepository;
-import org.springframework.data.jpa.repository.Modifying;
-import org.springframework.data.jpa.repository.Query;
-import org.springframework.data.repository.query.Param;
-
-import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
-
 public interface ITermConceptParentChildLinkDao extends JpaRepository<TermConceptParentChildLink, Long> {

-	@Query("DELETE FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
-	@Modifying
-	void deleteByCodeSystemVersion(@Param("cs_pid") Long thePid);
-
 	@Query("SELECT t.myParentPid FROM TermConceptParentChildLink t WHERE t.myChildPid = :child_pid")
 	Collection<Long> findAllWithChild(@Param("child_pid") Long theConceptPid);

+	@Query("SELECT t FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
+	Slice<TermConceptParentChildLink> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
+
 }
@@ -1,17 +1,12 @@
 package ca.uhn.fhir.jpa.dao.data;

-import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
-import ca.uhn.fhir.jpa.entity.TermConcept;
 import ca.uhn.fhir.jpa.entity.TermConceptProperty;
-import org.springframework.data.domain.Page;
 import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Slice;
 import org.springframework.data.jpa.repository.JpaRepository;
-import org.springframework.data.jpa.repository.Modifying;
 import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.repository.query.Param;

-import java.util.List;
-
 /*
 * #%L
 * HAPI FHIR JPA Server
@@ -33,5 +28,8 @@ import java.util.List;
 */

 public interface ITermConceptPropertyDao extends JpaRepository<TermConceptProperty, Long> {
-	// nothing
+
+	@Query("SELECT t FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid")
+	Slice<TermConceptProperty> findByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
+
 }
@@ -160,13 +160,16 @@ public class FhirResourceDaoConceptMapDstu3 extends FhirResourceDaoDstu3<Concept
 			boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
 		ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime, theForceUpdate, theCreateNewHistoryEntry);

-		ConceptMap conceptMap = (ConceptMap) theResource;
-		// Convert from DSTU3 to R4
-		try {
-			myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, VersionConvertor_30_40.convertConceptMap(conceptMap));
+		if (retVal.getDeleted() == null) {
+			try {
+				ConceptMap conceptMap = (ConceptMap) theResource;
+				org.hl7.fhir.r4.model.ConceptMap converted = VersionConvertor_30_40.convertConceptMap(conceptMap);
+				myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, converted);
 			} catch (FHIRException fe) {
 				throw new InternalErrorException(fe);
+			}
+		} else {
+			myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
 		}

 		return retVal;
@@ -26,12 +26,13 @@ import static org.apache.commons.lang3.StringUtils.trim;
 import java.math.BigDecimal;
 import java.util.*;

+import javax.annotation.PostConstruct;
 import javax.measure.unit.NonSI;
 import javax.measure.unit.Unit;

 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
-import org.hl7.fhir.dstu3.context.IWorkerContext;
+import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
 import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
 import org.hl7.fhir.dstu3.model.*;
 import org.hl7.fhir.dstu3.model.CapabilityStatement.CapabilityStatementRestSecurityComponent;
@@ -58,6 +59,13 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
 	@Autowired
 	private org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport myValidationSupport;

+	private HapiWorkerContext myWorkerContext;
+
+	@PostConstruct
+	public void start() {
+		myWorkerContext = new HapiWorkerContext(getContext(), myValidationSupport);
+	}
+
 	/**
 	 * Constructor
 	 */
@@ -65,8 +73,8 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
 		super();
 	}

-	public SearchParamExtractorDstu3(FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
-		super(theCtx, theSearchParamRegistry);
+	public SearchParamExtractorDstu3(DaoConfig theDaoConfig, FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
+		super(theDaoConfig, theCtx, theSearchParamRegistry);
 		myValidationSupport = theValidationSupport;
 	}

@@ -78,7 +86,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
 					searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
 				}

-				ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
+				ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
 				nextEntity.setResource(theEntity);
 				retVal.add(nextEntity);
 			}
@@ -87,7 +95,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
 				if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
 					value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
 				}
-				ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
+				ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
 				nextEntity.setResource(theEntity);
 				retVal.add(nextEntity);
 			}
@@ -695,8 +703,7 @@ public class SearchParamExtractorDstu3 extends BaseSearchParamExtractor implemen
 	 */
 	@Override
 	protected List<Object> extractValues(String thePaths, IBaseResource theResource) {
-		IWorkerContext worker = new org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext(getContext(), myValidationSupport);
-		FHIRPathEngine fp = new FHIRPathEngine(worker);
+		FHIRPathEngine fp = new FHIRPathEngine(myWorkerContext);

 		List<Object> values = new ArrayList<>();
 		try {
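Note: the DSTU3 extractor previously built a fresh HapiWorkerContext on every extractValues call; caching it in a field populated by a @PostConstruct method leaves FHIRPathEngine construction as the only per-call cost. The same shape in isolation (the class name and the standalone FhirContext are illustrative; the real class receives its collaborators by injection):

    import javax.annotation.PostConstruct;

    import org.hl7.fhir.dstu3.hapi.ctx.HapiWorkerContext;
    import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
    import org.hl7.fhir.dstu3.utils.FHIRPathEngine;

    import ca.uhn.fhir.context.FhirContext;

    public class CachedWorkerContextHolder {

        private final FhirContext myContext = FhirContext.forDstu3();
        private IValidationSupport myValidationSupport; // injected in the real class
        private HapiWorkerContext myWorkerContext;

        @PostConstruct
        public void start() {
            // Build the expensive collaborator once, after injection completes
            myWorkerContext = new HapiWorkerContext(myContext, myValidationSupport);
        }

        public FHIRPathEngine newEngine() {
            // Engines stay cheap to create because they share the cached context
            return new FHIRPathEngine(myWorkerContext);
        }
    }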
@@ -38,7 +38,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.hl7.fhir.r4.hapi.ctx.IValidationSupport.CodeValidationResult;
-import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
+import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
 import org.hl7.fhir.r4.model.CodeSystem;
 import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;
 import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
@@ -47,7 +47,6 @@ import org.hl7.fhir.r4.model.Coding;
 import org.hl7.fhir.r4.model.IdType;
 import org.springframework.beans.factory.annotation.Autowired;

-import javax.servlet.http.HttpServletRequest;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
@@ -156,9 +156,12 @@ public class FhirResourceDaoConceptMapR4 extends FhirResourceDaoR4<ConceptMap> i
 			boolean theUpdateVersion, Date theUpdateTime, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
 		ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theUpdateTime, theForceUpdate, theCreateNewHistoryEntry);

-		ConceptMap conceptMap = (ConceptMap) theResource;
+		if (retVal.getDeleted() == null) {
+			ConceptMap conceptMap = (ConceptMap) theResource;
 			myHapiTerminologySvc.storeTermConceptMapAndChildren(retVal, conceptMap);
+		} else {
+			myHapiTerminologySvc.deleteConceptMapAndChildren(retVal);
+		}

 		return retVal;
 	}
@@ -64,8 +64,8 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
 		super();
 	}

-	public SearchParamExtractorR4(FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
-		super(theCtx, theSearchParamRegistry);
+	public SearchParamExtractorR4(DaoConfig theDaoConfig, FhirContext theCtx, IValidationSupport theValidationSupport, ISearchParamRegistry theSearchParamRegistry) {
+		super(theDaoConfig, theCtx, theSearchParamRegistry);
 		myValidationSupport = theValidationSupport;
 	}

@@ -77,7 +77,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
 					searchTerm = searchTerm.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
 				}

-				ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
+				ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), resourceName, BaseHapiFhirDao.normalizeString(searchTerm), searchTerm);
 				nextEntity.setResource(theEntity);
 				retVal.add(nextEntity);
 			}
@@ -86,7 +86,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
 				if (value.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
 					value = value.substring(0, ResourceIndexedSearchParamString.MAX_LENGTH);
 				}
-				ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
+				ResourceIndexedSearchParamString nextEntity = new ResourceIndexedSearchParamString(getDaoConfig(), nextSpDef.getName(), BaseHapiFhirDao.normalizeString(value), value);
 				nextEntity.setResource(theEntity);
 				retVal.add(nextEntity);
 			}
@@ -104,7 +104,7 @@ public class SearchParamExtractorR4 extends BaseSearchParamExtractor implements
 	 */
 	@Override
 	public Set<ResourceIndexedSearchParamDate> extractSearchParamDates(ResourceTable theEntity, IBaseResource theResource) {
-		HashSet<ResourceIndexedSearchParamDate> retVal = new HashSet<ResourceIndexedSearchParamDate>();
+		HashSet<ResourceIndexedSearchParamDate> retVal = new HashSet<>();

 		Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
 		for (RuntimeSearchParam nextSpDef : searchParams) {
@@ -30,7 +30,7 @@ import java.util.Collection;
 import java.util.Date;

 @MappedSuperclass
-public abstract class BaseHasResource {
+public abstract class BaseHasResource implements IBaseResourceEntity {

 	@Column(name = "RES_DELETED_AT", nullable = true)
 	@Temporal(TemporalType.TIMESTAMP)
@@ -42,7 +42,7 @@ public abstract class BaseHasResource {
 	@OptimisticLock(excluded = true)
 	private FhirVersionEnum myFhirVersion;

-	@OneToOne(optional = true, fetch = FetchType.EAGER, cascade = {}, orphanRemoval = false)
+	@OneToOne(optional = true, fetch = FetchType.LAZY, cascade = {}, orphanRemoval = false)
 	@JoinColumn(name = "FORCED_ID_PID")
 	@OptimisticLock(excluded = true)
 	private ForcedId myForcedId;
@@ -36,13 +36,15 @@ import java.util.Date;

 @MappedSuperclass
 public abstract class BaseResourceIndexedSearchParam implements Serializable {
-	/** Don't change this without careful consideration. You will break existing hashes! */
-	private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
-	/** Don't make this public 'cause nobody better touch it! */
-	private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);

 	static final int MAX_SP_NAME = 100;
+	/**
+	 * Don't change this without careful consideration. You will break existing hashes!
+	 */
+	private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
+	/**
+	 * Don't make this public 'cause nobody better be able to modify it!
+	 */
+	private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);
 	private static final long serialVersionUID = 1L;

 	// TODO: make this nullable=false and a primitive (written may 2017)
@@ -71,6 +73,13 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
 	@Temporal(TemporalType.TIMESTAMP)
 	private Date myUpdated;

+	/**
+	 * Subclasses may override
+	 */
+	protected void clearHashes() {
+		// nothing
+	}
+
 	protected abstract Long getId();

 	public String getParamName() {
@@ -82,13 +91,6 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
 		myParamName = theName;
 	}

-	/**
-	 * Subclasses may override
-	 */
-	protected void clearHashes() {
-		// nothing
-	}
-
 	public ResourceTable getResource() {
 		return myResource;
 	}
@@ -127,6 +129,10 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {

 	public abstract IQueryParameterType toQueryParameterType();

+	public static long calculateHashIdentity(String theResourceType, String theParamName) {
+		return hash(theResourceType, theParamName);
+	}
+
 	/**
 	 * Applies a fast and consistent hashing algorithm to a set of strings
 	 */
@@ -148,5 +154,4 @@ public abstract class BaseResourceIndexedSearchParam implements Serializable {
 		return hashCode.asLong();
 	}

-
 }
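Note: calculateHashIdentity funnels into the existing hash(...) helper, so the new HASH_IDENTITY columns hold murmur3-128 (seed 0) digests of the resource type and parameter name, pipe-delimited and truncated to a long. A standalone sketch of the scheme (the class name is illustrative; the constants mirror the ones above):

    import com.google.common.base.Charsets;
    import com.google.common.hash.HashFunction;
    import com.google.common.hash.Hasher;
    import com.google.common.hash.Hashing;

    public class HashIdentityDemo {

        // The seed must never change, or existing column values become unmatchable
        private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
        private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);

        static long hash(String... theValues) {
            Hasher hasher = HASH_FUNCTION.newHasher();
            for (String next : theValues) {
                hasher.putString(next, Charsets.UTF_8);
                hasher.putBytes(DELIMITER_BYTES); // keeps ("ab","c") distinct from ("a","bc")
            }
            return hasher.hash().asLong();
        }

        public static void main(String[] theArgs) {
            // Deterministic: the same inputs always yield the same HASH_IDENTITY
            System.out.println(hash("Patient", "name"));
        }
    }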
@@ -20,27 +20,17 @@ package ca.uhn.fhir.jpa.entity;
 * #L%
 */

-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.ForeignKey;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.SequenceGenerator;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
-
 import org.hibernate.annotations.ColumnDefault;

+import javax.persistence.*;
+
 //@formatter:off
 @Entity()
 @Table(name = "HFJ_FORCED_ID", uniqueConstraints = {
 	@UniqueConstraint(name = "IDX_FORCEDID_RESID", columnNames = {"RESOURCE_PID"}),
-	@UniqueConstraint(name = "IDX_FORCEDID_TYPE_RESID", columnNames = {"RESOURCE_TYPE", "RESOURCE_PID"})
-}, indexes= {
+	@UniqueConstraint(name = "IDX_FORCEDID_TYPE_RESID", columnNames = {"RESOURCE_TYPE", "RESOURCE_PID"}),
+	@UniqueConstraint(name = "IDX_FORCEDID_TYPE_FID", columnNames = {"RESOURCE_TYPE", "FORCED_ID"})
+}, indexes = {
 	@Index(name = "IDX_FORCEDID_TYPE_FORCEDID", columnList = "RESOURCE_TYPE,FORCED_ID"),
 })
 //@formatter:on
@@ -57,11 +47,11 @@ public class ForcedId {
 	@Column(name = "PID")
 	private Long myId;

-	@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey=@ForeignKey(name="FK_FORCEDID_RESOURCE"))
+	@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE"))
 	@OneToOne()
 	private ResourceTable myResource;

-	@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable=false)
+	@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable = false)
 	private Long myResourcePid;

 	// This is updatable=true because it was added in 1.6 and needs to be set.. At some
@@ -81,39 +71,39 @@ public class ForcedId {
 		return myForcedId;
 	}

-	public ResourceTable getResource() {
-		return myResource;
-	}
-
-	public Long getResourcePid() {
-		if (myResourcePid==null) {
-			return myResource.getId();
-		}
-		return myResourcePid;
-	}
-
-	public String getResourceType() {
-		return myResourceType;
-	}
-
 	public void setForcedId(String theForcedId) {
 		myForcedId = theForcedId;
 	}

+	public ResourceTable getResource() {
+		return myResource;
+	}
+
 	public void setResource(ResourceTable theResource) {
 		myResource = theResource;
 	}

-	public void setResourcePid(Long theResourcePid) {
-		myResourcePid = theResourcePid;
+	public Long getResourcePid() {
+		if (myResourcePid == null) {
+			return myResource.getId();
+		}
+		return myResourcePid;
 	}

 	public void setResourcePid(ResourceTable theResourcePid) {
 		myResource = theResourcePid;
 	}

+	public String getResourceType() {
+		return myResourceType;
+	}
+
 	public void setResourceType(String theResourceType) {
 		myResourceType = theResourceType;
 	}

+	public void setResourcePid(Long theResourcePid) {
+		myResourcePid = theResourcePid;
+	}
+
 }
@@ -0,0 +1,41 @@
+package ca.uhn.fhir.jpa.entity;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+import java.util.Date;
+
+import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.model.primitive.InstantDt;
+
+public interface IBaseResourceEntity {
+
+	Date getDeleted();
+	FhirVersionEnum getFhirVersion();
+	Long getId();
+	IdDt getIdDt();
+	InstantDt getPublished();
+	Long getResourceId();
+	String getResourceType();
+	InstantDt getUpdated();
+	Date getUpdatedDate();
+	long getVersion();
+	boolean isHasTags();
+}
@@ -31,30 +31,32 @@ import javax.persistence.*;

 @Embeddable
 @Entity
 @Table(name = "HFJ_SPIDX_COORDS", indexes = {
 	@Index(name = "IDX_SP_COORDS", columnList = "RES_TYPE,SP_NAME,SP_LATITUDE,SP_LONGITUDE"),
 	@Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"),
 	@Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID")
 })
 public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchParam {

 	public static final int MAX_LENGTH = 100;

 	private static final long serialVersionUID = 1L;
+	@Column(name = "SP_LATITUDE")
+	@Field
+	public double myLatitude;
+	@Column(name = "SP_LONGITUDE")
+	@Field
+	public double myLongitude;
 	@Id
 	@SequenceGenerator(name = "SEQ_SPIDX_COORDS", sequenceName = "SEQ_SPIDX_COORDS")
 	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_COORDS")
 	@Column(name = "SP_ID")
 	private Long myId;
-
-	@Column(name = "SP_LATITUDE")
-	@Field
-	public double myLatitude;
-
-	@Column(name = "SP_LONGITUDE")
-	@Field
-	public double myLongitude;
+	/**
+	 * @since 3.5.0 - At some point this should be made not-null
+	 */
+	@Column(name = "HASH_IDENTITY", nullable = true)
+	private Long myHashIdentity;

 	public ResourceIndexedSearchParamCoords() {
 	}
@@ -65,6 +67,20 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
 		setLongitude(theLongitude);
 	}

+	@PrePersist
+	public void calculateHashes() {
+		if (myHashIdentity == null) {
+			String resourceType = getResourceType();
+			String paramName = getParamName();
+			setHashIdentity(calculateHashIdentity(resourceType, paramName));
+		}
+	}
+
+	@Override
+	protected void clearHashes() {
+		myHashIdentity = null;
+	}
+
 	@Override
 	public boolean equals(Object theObj) {
 		if (this == theObj) {
@@ -82,27 +98,39 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
 		b.append(getResource(), obj.getResource());
 		b.append(getLatitude(), obj.getLatitude());
 		b.append(getLongitude(), obj.getLongitude());
+		b.append(getHashIdentity(), obj.getHashIdentity());
 		return b.isEquals();
 	}

+	public Long getHashIdentity() {
+		return myHashIdentity;
+	}
+
+	public void setHashIdentity(Long theHashIdentity) {
+		myHashIdentity = theHashIdentity;
+	}
+
 	@Override
 	protected Long getId() {
 		return myId;
 	}

-	@Override
-	public IQueryParameterType toQueryParameterType() {
-		return null;
-	}
-
 	public double getLatitude() {
 		return myLatitude;
 	}

+	public void setLatitude(double theLatitude) {
+		myLatitude = theLatitude;
+	}
+
 	public double getLongitude() {
 		return myLongitude;
 	}

+	public void setLongitude(double theLongitude) {
+		myLongitude = theLongitude;
+	}
+
 	@Override
 	public int hashCode() {
 		HashCodeBuilder b = new HashCodeBuilder();
@@ -113,12 +141,9 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP
 		return b.toHashCode();
 	}

-	public void setLatitude(double theLatitude) {
-		myLatitude = theLatitude;
-	}
-
-	public void setLongitude(double theLongitude) {
-		myLongitude = theLongitude;
+	@Override
+	public IQueryParameterType toQueryParameterType() {
+		return null;
 	}

 	@Override
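Note: the @PrePersist callback above fires immediately before the INSERT, so HASH_IDENTITY is filled in exactly once per new row without every code path having to remember it, and clearHashes() lets re-indexing reset the value so it is recomputed on the next persist. A minimal standalone illustration of the lifecycle hook (the entity and field names are hypothetical; the hash call reuses the HashIdentityDemo helper sketched earlier in place of BaseResourceIndexedSearchParam.calculateHashIdentity):

    import javax.persistence.Entity;
    import javax.persistence.GeneratedValue;
    import javax.persistence.Id;
    import javax.persistence.PrePersist;

    @Entity
    public class HashedRow {

        @Id
        @GeneratedValue
        private Long myId;

        private String myResourceType;
        private String myParamName;
        private Long myHashIdentity;

        @PrePersist
        public void calculateHashes() {
            // JPA calls this right before INSERT; only fill the hash if absent
            if (myHashIdentity == null) {
                myHashIdentity = HashIdentityDemo.hash(myResourceType, myParamName);
            }
        }
    }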
@@ -37,17 +37,14 @@ import java.util.Date;
 @Embeddable
 @Entity
 @Table(name = "HFJ_SPIDX_DATE", indexes = {
-	@Index(name = "IDX_SP_DATE", columnList = "RES_TYPE,SP_NAME,SP_VALUE_LOW,SP_VALUE_HIGH"),
+	// @Index(name = "IDX_SP_DATE", columnList = "RES_TYPE,SP_NAME,SP_VALUE_LOW,SP_VALUE_HIGH"),
+	@Index(name = "IDX_SP_DATE_HASH", columnList = "HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH"),
 	@Index(name = "IDX_SP_DATE_UPDATED", columnList = "SP_UPDATED"),
 	@Index(name = "IDX_SP_DATE_RESID", columnList = "RES_ID")
 })
 public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchParam {

 	private static final long serialVersionUID = 1L;

-	@Transient
-	private transient String myOriginalValue;
-
 	@Column(name = "SP_VALUE_HIGH", nullable = true)
 	@Temporal(TemporalType.TIMESTAMP)
 	@Field
@@ -56,11 +53,18 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
 	@Temporal(TemporalType.TIMESTAMP)
 	@Field
 	public Date myValueLow;
+	@Transient
+	private transient String myOriginalValue;
 	@Id
 	@SequenceGenerator(name = "SEQ_SPIDX_DATE", sequenceName = "SEQ_SPIDX_DATE")
 	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_DATE")
 	@Column(name = "SP_ID")
 	private Long myId;
+	/**
+	 * @since 3.5.0 - At some point this should be made not-null
+	 */
+	@Column(name = "HASH_IDENTITY", nullable = true)
+	private Long myHashIdentity;

 	/**
 	 * Constructor
@@ -79,6 +83,20 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
 		myOriginalValue = theOriginalValue;
 	}

+	@PrePersist
+	public void calculateHashes() {
+		if (myHashIdentity == null) {
+			String resourceType = getResourceType();
+			String paramName = getParamName();
+			setHashIdentity(calculateHashIdentity(resourceType, paramName));
+		}
+	}
+
+	@Override
+	protected void clearHashes() {
+		myHashIdentity = null;
+	}
+
 	@Override
 	public boolean equals(Object theObj) {
 		if (this == theObj) {
@@ -97,9 +115,23 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
 		b.append(getResource(), obj.getResource());
 		b.append(getTimeFromDate(getValueHigh()), getTimeFromDate(obj.getValueHigh()));
 		b.append(getTimeFromDate(getValueLow()), getTimeFromDate(obj.getValueLow()));
+		b.append(getHashIdentity(), obj.getHashIdentity());
 		return b.isEquals();
 	}

+	public Long getHashIdentity() {
+		return myHashIdentity;
+	}
+
+	public void setHashIdentity(Long theHashIdentity) {
+		myHashIdentity = theHashIdentity;
+	}
+
+	@Override
+	protected Long getId() {
+		return myId;
+	}
+
 	protected Long getTimeFromDate(Date date) {
 		if (date != null) {
 			return date.getTime();
@@ -107,11 +139,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
 		return null;
 	}

-	@Override
-	protected Long getId() {
-		return myId;
-	}
-
 	public Date getValueHigh() {
 		return myValueHigh;
 	}
@@ -34,15 +34,14 @@ import org.hibernate.search.annotations.NumericField;
 import javax.persistence.*;
 import java.math.BigDecimal;

-//@formatter:off
 @Embeddable
 @Entity
 @Table(name = "HFJ_SPIDX_NUMBER", indexes = {
-	@Index(name = "IDX_SP_NUMBER", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
+	// @Index(name = "IDX_SP_NUMBER", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
+	@Index(name = "IDX_SP_NUMBER_HASH_VAL", columnList = "HASH_IDENTITY,SP_VALUE"),
 	@Index(name = "IDX_SP_NUMBER_UPDATED", columnList = "SP_UPDATED"),
 	@Index(name = "IDX_SP_NUMBER_RESID", columnList = "RES_ID")
 })
-//@formatter:on
 public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchParam {

 	private static final long serialVersionUID = 1L;
@@ -56,6 +55,11 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
 	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_NUMBER")
 	@Column(name = "SP_ID")
 	private Long myId;
+	/**
+	 * @since 3.5.0 - At some point this should be made not-null
+	 */
+	@Column(name = "HASH_IDENTITY", nullable = true)
+	private Long myHashIdentity;

 	public ResourceIndexedSearchParamNumber() {
 	}
@@ -65,6 +69,20 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
 		setValue(theValue);
 	}

+	@PrePersist
+	public void calculateHashes() {
+		if (myHashIdentity == null) {
+			String resourceType = getResourceType();
+			String paramName = getParamName();
+			setHashIdentity(calculateHashIdentity(resourceType, paramName));
+		}
+	}
+
+	@Override
+	protected void clearHashes() {
+		myHashIdentity = null;
+	}
+
 	@Override
 	public boolean equals(Object theObj) {
 		if (this == theObj) {
@@ -82,9 +100,18 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP
 		b.append(getResource(), obj.getResource());
 		b.append(getValue(), obj.getValue());
 		b.append(isMissing(), obj.isMissing());
+		b.append(getHashIdentity(), obj.getHashIdentity());
 		return b.isEquals();
 	}

+	public Long getHashIdentity() {
+		return myHashIdentity;
+	}
+
+	public void setHashIdentity(Long theHashIdentity) {
+		myHashIdentity = theHashIdentity;
+	}
+
 	@Override
 	protected Long getId() {
 		return myId;
@@ -33,13 +33,14 @@ import org.hibernate.search.annotations.NumericField;
 
 import javax.persistence.*;
 import java.math.BigDecimal;
-import java.math.RoundingMode;
 
 //@formatter:off
 @Embeddable
 @Entity
 @Table(name = "HFJ_SPIDX_QUANTITY", indexes = {
-	@Index(name = "IDX_SP_QUANTITY", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_UNITS,SP_VALUE"),
+	// @Index(name = "IDX_SP_QUANTITY", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_UNITS,SP_VALUE"),
+	@Index(name = "IDX_SP_QUANTITY_HASH", columnList = "HASH_IDENTITY,SP_VALUE"),
+	@Index(name = "IDX_SP_QUANTITY_HASH_UN", columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE"),
 	@Index(name = "IDX_SP_QUANTITY_UPDATED", columnList = "SP_UPDATED"),
 	@Index(name = "IDX_SP_QUANTITY_RESID", columnList = "RES_ID")
 })
@@ -66,20 +67,26 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam
 	@Column(name = "SP_ID")
 	private Long myId;
 	/**
-	 * @since 3.4.0 - At some point this should be made not-null
+	 * @since 3.5.0 - At some point this should be made not-null
 	 */
-	@Column(name = "HASH_UNITS_AND_VALPREFIX", nullable = true)
-	private Long myHashUnitsAndValPrefix;
+	@Column(name = "HASH_IDENTITY_AND_UNITS", nullable = true)
+	private Long myHashIdentityAndUnits;
 	/**
-	 * @since 3.4.0 - At some point this should be made not-null
+	 * @since 3.5.0 - At some point this should be made not-null
 	 */
-	@Column(name = "HASH_VALPREFIX", nullable = true)
-	private Long myHashValPrefix;
+	@Column(name = "HASH_IDENTITY_SYS_UNITS", nullable = true)
+	private Long myHashIdentitySystemAndUnits;
+	/**
+	 * @since 3.5.0 - At some point this should be made not-null
+	 */
+	@Column(name = "HASH_IDENTITY", nullable = true)
+	private Long myHashIdentity;
 
 	public ResourceIndexedSearchParamQuantity() {
 		// nothing
 	}
 
 
 	public ResourceIndexedSearchParamQuantity(String theParamName, BigDecimal theValue, String theSystem, String theUnits) {
 		setParamName(theParamName);
 		setSystem(theSystem);
@@ -89,16 +96,21 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam
 
 	@PrePersist
 	public void calculateHashes() {
-		if (myHashUnitsAndValPrefix == null) {
-			setHashUnitsAndValPrefix(hash(getResourceType(), getParamName(), getSystem(), getUnits(), toTruncatedString(getValue())));
-			setHashValPrefix(hash(getResourceType(), getParamName(), toTruncatedString(getValue())));
+		if (myHashIdentity == null) {
+			String resourceType = getResourceType();
+			String paramName = getParamName();
+			String units = getUnits();
+			String system = getSystem();
+			setHashIdentity(calculateHashIdentity(resourceType, paramName));
+			setHashIdentityAndUnits(calculateHashUnits(resourceType, paramName, units));
+			setHashIdentitySystemAndUnits(calculateHashSystemAndUnits(resourceType, paramName, system, units));
 		}
 	}
 
 	@Override
 	protected void clearHashes() {
-		myHashUnitsAndValPrefix = null;
-		myHashValPrefix = null;
+		myHashIdentity = null;
+		myHashIdentityAndUnits = null;
 	}
 
 	@Override
@@ -119,27 +131,36 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam
 		b.append(getSystem(), obj.getSystem());
 		b.append(getUnits(), obj.getUnits());
 		b.append(getValue(), obj.getValue());
-		b.append(getHashUnitsAndValPrefix(), obj.getHashUnitsAndValPrefix());
-		b.append(getHashValPrefix(), obj.getHashValPrefix());
+		b.append(getHashIdentity(), obj.getHashIdentity());
+		b.append(getHashIdentitySystemAndUnits(), obj.getHashIdentitySystemAndUnits());
+		b.append(getHashIdentityAndUnits(), obj.getHashIdentityAndUnits());
 		return b.isEquals();
 	}
 
-	public Long getHashUnitsAndValPrefix() {
+	public Long getHashIdentity() {
 		calculateHashes();
-		return myHashUnitsAndValPrefix;
+		return myHashIdentity;
 	}
 
-	public void setHashUnitsAndValPrefix(Long theHashUnitsAndValPrefix) {
-		myHashUnitsAndValPrefix = theHashUnitsAndValPrefix;
+	public void setHashIdentity(Long theHashIdentity) {
+		myHashIdentity = theHashIdentity;
 	}
 
-	public Long getHashValPrefix() {
+	public Long getHashIdentityAndUnits() {
 		calculateHashes();
-		return myHashValPrefix;
+		return myHashIdentityAndUnits;
 	}
 
-	public void setHashValPrefix(Long theHashValPrefix) {
-		myHashValPrefix = theHashValPrefix;
+	public void setHashIdentityAndUnits(Long theHashIdentityAndUnits) {
+		myHashIdentityAndUnits = theHashIdentityAndUnits;
+	}
+
+	private Long getHashIdentitySystemAndUnits() {
+		return myHashIdentitySystemAndUnits;
+	}
+
+	public void setHashIdentitySystemAndUnits(Long theHashIdentitySystemAndUnits) {
+		myHashIdentitySystemAndUnits = theHashIdentitySystemAndUnits;
 	}
 
 	@Override
@@ -176,14 +197,13 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam
 
 	@Override
 	public int hashCode() {
+		calculateHashes();
 		HashCodeBuilder b = new HashCodeBuilder();
+		b.append(getResourceType());
 		b.append(getParamName());
-		b.append(getResource());
 		b.append(getSystem());
 		b.append(getUnits());
 		b.append(getValue());
-		b.append(getHashUnitsAndValPrefix());
-		b.append(getHashValPrefix());
 		return b.toHashCode();
 	}
 
@@ -201,14 +221,16 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearchParam
 		b.append("units", getUnits());
 		b.append("value", getValue());
 		b.append("missing", isMissing());
+		b.append("hashIdentitySystemAndUnits", myHashIdentitySystemAndUnits);
 		return b.build();
 	}
 
-	private static String toTruncatedString(BigDecimal theValue) {
-		if (theValue == null) {
-			return null;
-		}
-		return theValue.setScale(0, RoundingMode.FLOOR).toPlainString();
+	public static long calculateHashSystemAndUnits(String theResourceType, String theParamName, String theSystem, String theUnits) {
+		return hash(theResourceType, theParamName, theSystem, theUnits);
+	}
+
+	public static long calculateHashUnits(String theResourceType, String theParamName, String theUnits) {
+		return hash(theResourceType, theParamName, theUnits);
 	}
 
 }
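With the static helpers above, a query layer can pick the narrowest precomputed hash matching the qualifiers supplied in a quantity search. A hedged sketch (resource type and parameter name are hard-coded for illustration, and calculateHashIdentity is assumed to be the inherited static helper referenced in calculateHashes()):

    // Pick the most specific precomputed hash column for a quantity search,
    // falling back to the identity-only hash when no qualifiers are given
    static long chooseQuantityHash(String theSystem, String theUnits) {
        String resourceType = "Observation";      // illustrative
        String paramName = "value-quantity";      // illustrative
        if (theSystem != null && theUnits != null) {
            // compared against the HASH_IDENTITY_SYS_UNITS column
            return ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(resourceType, paramName, theSystem, theUnits);
        }
        if (theUnits != null) {
            // compared against the HASH_IDENTITY_AND_UNITS column
            return ResourceIndexedSearchParamQuantity.calculateHashUnits(resourceType, paramName, theUnits);
        }
        // compared against the HASH_IDENTITY column
        return BaseResourceIndexedSearchParam.calculateHashIdentity(resourceType, paramName);
    }

Each returned long lines up with one of the three new indexed columns, which is why the old multi-column IDX_SP_QUANTITY index can be commented out.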
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.entity;
  * #L%
  */
 
+import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.model.api.IQueryParameterType;
 import ca.uhn.fhir.rest.param.StringParam;
 import org.apache.commons.lang3.StringUtils;
@@ -38,7 +39,14 @@ import static org.apache.commons.lang3.StringUtils.left;
 @Embeddable
 @Entity
 @Table(name = "HFJ_SPIDX_STRING", indexes = {
-	@Index(name = "IDX_SP_STRING", columnList = "RES_TYPE,SP_NAME,SP_VALUE_NORMALIZED"),
+	/*
+	 * Note: We previously had indexes with the following names,
+	 * do not reuse these names:
+	 * IDX_SP_STRING
+	 */
+	@Index(name = "IDX_SP_STRING_HASH_NRM", columnList = "HASH_NORM_PREFIX,SP_VALUE_NORMALIZED"),
+	@Index(name = "IDX_SP_STRING_HASH_EXCT", columnList = "HASH_EXACT"),
+
 	@Index(name = "IDX_SP_STRING_UPDATED", columnList = "SP_UPDATED"),
 	@Index(name = "IDX_SP_STRING_RESID", columnList = "RES_ID")
 })
@@ -127,13 +135,16 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam
 	 */
 	@Column(name = "HASH_EXACT", nullable = true)
 	private Long myHashExact;
+	@Transient
+	private transient DaoConfig myDaoConfig;
 
 	public ResourceIndexedSearchParamString() {
 		super();
 	}
 
 
-	public ResourceIndexedSearchParamString(String theName, String theValueNormalized, String theValueExact) {
+	public ResourceIndexedSearchParamString(DaoConfig theDaoConfig, String theName, String theValueNormalized, String theValueExact) {
+		setDaoConfig(theDaoConfig);
 		setParamName(theName);
 		setValueNormalized(theValueNormalized);
 		setValueExact(theValueExact);
@@ -141,9 +152,13 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam
 
 	@PrePersist
 	public void calculateHashes() {
-		if (myHashNormalizedPrefix == null) {
-			setHashNormalizedPrefix(hash(getResourceType(), getParamName(), left(getValueNormalized(), HASH_PREFIX_LENGTH)));
-			setHashExact(hash(getResourceType(), getParamName(), getValueExact()));
+		if (myHashNormalizedPrefix == null && myDaoConfig != null) {
+			String resourceType = getResourceType();
+			String paramName = getParamName();
+			String valueNormalized = getValueNormalized();
+			String valueExact = getValueExact();
+			setHashNormalizedPrefix(calculateHashNormalized(myDaoConfig, resourceType, paramName, valueNormalized));
+			setHashExact(calculateHashExact(resourceType, paramName, valueExact));
 		}
 	}
 
@@ -169,8 +184,8 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam
 		b.append(getParamName(), obj.getParamName());
 		b.append(getResource(), obj.getResource());
 		b.append(getValueExact(), obj.getValueExact());
-		b.append(getHashNormalizedPrefix(), obj.getHashNormalizedPrefix());
 		b.append(getHashExact(), obj.getHashExact());
+		b.append(getHashNormalizedPrefix(), obj.getHashNormalizedPrefix());
 		return b.isEquals();
 	}
 
@@ -225,11 +240,14 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam
 		b.append(getParamName());
 		b.append(getResource());
 		b.append(getValueExact());
-		b.append(getHashNormalizedPrefix());
-		b.append(getHashExact());
 		return b.toHashCode();
 	}
 
+	public BaseResourceIndexedSearchParam setDaoConfig(DaoConfig theDaoConfig) {
+		myDaoConfig = theDaoConfig;
+		return this;
+	}
+
 	@Override
 	public IQueryParameterType toQueryParameterType() {
 		return new StringParam(getValueExact());
@@ -244,4 +262,23 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam
 		return b.build();
 	}
 
+	public static long calculateHashExact(String theResourceType, String theParamName, String theValueExact) {
+		return hash(theResourceType, theParamName, theValueExact);
+	}
+
+	public static long calculateHashNormalized(DaoConfig theDaoConfig, String theResourceType, String theParamName, String theValueNormalized) {
+		/*
+		 * If we're not allowing contained searches, we'll add the first
+		 * bit of the normalized value to the hash. This helps to
+		 * make the hash even more unique, which will be good for
+		 * performance.
+		 */
+		int hashPrefixLength = HASH_PREFIX_LENGTH;
+		if (theDaoConfig.isAllowContainsSearches()) {
+			hashPrefixLength = 0;
+		}
+
+		return hash(theResourceType, theParamName, left(theValueNormalized, hashPrefixLength));
+	}
+
 }
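The effect of the isAllowContainsSearches() branch in calculateHashNormalized(...) is easiest to see side by side. A small sketch, assuming DaoConfig exposes a matching setter for the flag (the setter name is an assumption):

    DaoConfig config = new DaoConfig();

    // With contains searches disabled, the first characters of the normalized
    // value are mixed into the hash, so the hash is highly selective
    config.setAllowContainsSearches(false);   // assumed setter
    long selective = ResourceIndexedSearchParamString.calculateHashNormalized(
        config, "Patient", "family", "smith");

    // With contains searches enabled, left(value, 0) contributes nothing, so
    // every value for Patient/family shares one hash and the index can still
    // serve ":contains" queries that match mid-string
    config.setAllowContainsSearches(true);
    long broad = ResourceIndexedSearchParamString.calculateHashNormalized(
        config, "Patient", "family", "smith");

This is the trade-off the code comment describes: prefix bits buy selectivity, but only when the server can guarantee searches are anchored at the start of the value.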
@@ -31,11 +31,23 @@ import org.hibernate.search.annotations.Field;
 
 import javax.persistence.*;
 
+import static org.apache.commons.lang3.StringUtils.defaultString;
+import static org.apache.commons.lang3.StringUtils.trim;
+
 @Embeddable
 @Entity
 @Table(name = "HFJ_SPIDX_TOKEN", indexes = {
-	@Index(name = "IDX_SP_TOKEN", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_VALUE"),
-	@Index(name = "IDX_SP_TOKEN_UNQUAL", columnList = "RES_TYPE,SP_NAME,SP_VALUE"),
+	/*
+	 * Note: We previously had indexes with the following names,
+	 * do not reuse these names:
+	 * IDX_SP_TOKEN
+	 * IDX_SP_TOKEN_UNQUAL
+	 */
+	@Index(name = "IDX_SP_TOKEN_HASH", columnList = "HASH_IDENTITY"),
+	@Index(name = "IDX_SP_TOKEN_HASH_S", columnList = "HASH_SYS"),
+	@Index(name = "IDX_SP_TOKEN_HASH_SV", columnList = "HASH_SYS_AND_VALUE"),
+	@Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE"),
+
 	@Index(name = "IDX_SP_TOKEN_UPDATED", columnList = "SP_UPDATED"),
 	@Index(name = "IDX_SP_TOKEN_RESID", columnList = "RES_ID")
 })
@@ -56,6 +68,11 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam
 	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_TOKEN")
 	@Column(name = "SP_ID")
 	private Long myId;
+	/**
+	 * @since 3.4.0 - At some point this should be made not-null
+	 */
+	@Column(name = "HASH_IDENTITY", nullable = true)
+	private Long myHashIdentity;
 	/**
 	 * @since 3.4.0 - At some point this should be made not-null
 	 */
@@ -90,17 +107,20 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam
 		setValue(theValue);
 	}
 
 
 	@PrePersist
 	public void calculateHashes() {
 		if (myHashSystem == null) {
-			setHashSystem(hash(getResourceType(), getParamName(), getSystem()));
-			setHashSystemAndValue(hash(getResourceType(), getParamName(), getSystem(), getValue()));
-			setHashValue(hash(getResourceType(), getParamName(), getValue()));
+			String resourceType = getResourceType();
+			String paramName = getParamName();
+			String system = getSystem();
+			String value = getValue();
+			setHashIdentity(calculateHashIdentity(resourceType, paramName));
+			setHashSystem(calculateHashSystem(resourceType, paramName, system));
+			setHashSystemAndValue(calculateHashSystemAndValue(resourceType, paramName, system, value));
+			setHashValue(calculateHashValue(resourceType, paramName, value));
 		}
 	}
 
 
 	@Override
 	protected void clearHashes() {
 		myHashSystem = null;
@@ -125,6 +145,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam
 		b.append(getResource(), obj.getResource());
 		b.append(getSystem(), obj.getSystem());
 		b.append(getValue(), obj.getValue());
+		b.append(getHashIdentity(), obj.getHashIdentity());
 		b.append(getHashSystem(), obj.getHashSystem());
 		b.append(getHashSystemAndValue(), obj.getHashSystemAndValue());
 		b.append(getHashValue(), obj.getHashValue());
@@ -136,6 +157,15 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam
 		return myHashSystem;
 	}
 
+	public Long getHashIdentity() {
+		calculateHashes();
+		return myHashIdentity;
+	}
+
+	public void setHashIdentity(Long theHashIdentity) {
+		myHashIdentity = theHashIdentity;
+	}
+
 	public void setHashSystem(Long theHashSystem) {
 		myHashSystem = theHashSystem;
 	}
@@ -184,18 +214,15 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam
 
 	@Override
 	public int hashCode() {
+		calculateHashes();
 		HashCodeBuilder b = new HashCodeBuilder();
 		b.append(getParamName());
 		b.append(getResource());
 		b.append(getSystem());
 		b.append(getValue());
-		b.append(getHashSystem());
-		b.append(getHashSystemAndValue());
-		b.append(getHashValue());
 		return b.toHashCode();
 	}
 
 
 	@Override
 	public IQueryParameterType toQueryParameterType() {
 		return new TokenParam(getSystem(), getValue());
@@ -210,4 +237,16 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam
 		b.append("value", getValue());
 		return b.build();
 	}
 
+	public static long calculateHashSystem(String theResourceType, String theParamName, String theSystem) {
+		return hash(theResourceType, theParamName, trim(theSystem));
+	}
+
+	public static long calculateHashSystemAndValue(String theResourceType, String theParamName, String theSystem, String theValue) {
+		return hash(theResourceType, theParamName, defaultString(trim(theSystem)), trim(theValue));
+	}
+
+	public static long calculateHashValue(String theResourceType, String theParamName, String theValue) {
+		return hash(theResourceType, theParamName, trim(theValue));
+	}
 }
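Because calculateHashSystemAndValue(...) trims its inputs and defaults a null system to the empty string, the token hashes are tolerant of incidental whitespace in source data. For example, these two calls produce the same HASH_SYS_AND_VALUE:

    long a = ResourceIndexedSearchParamToken.calculateHashSystemAndValue(
        "Patient", "identifier", " http://acme.org/mrn ", "12345 ");
    long b = ResourceIndexedSearchParamToken.calculateHashSystemAndValue(
        "Patient", "identifier", "http://acme.org/mrn", "12345");
    // a == b: trim()/defaultString() normalize the inputs before hashing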
@@ -34,6 +34,8 @@ import javax.persistence.*;
 @Entity
 @Table(name = "HFJ_SPIDX_URI", indexes = {
 	@Index(name = "IDX_SP_URI", columnList = "RES_TYPE,SP_NAME,SP_URI"),
+	@Index(name = "IDX_SP_URI_HASH_IDENTITY", columnList = "HASH_IDENTITY,SP_URI"),
+	@Index(name = "IDX_SP_URI_HASH_URI", columnList = "HASH_URI"),
 	@Index(name = "IDX_SP_URI_RESTYPE_NAME", columnList = "RES_TYPE,SP_NAME"),
 	@Index(name = "IDX_SP_URI_UPDATED", columnList = "SP_UPDATED"),
 	@Index(name = "IDX_SP_URI_COORDS", columnList = "RES_ID")
@@ -59,11 +61,17 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchParam
 	 */
 	@Column(name = "HASH_URI", nullable = true)
 	private Long myHashUri;
+	/**
+	 * @since 3.5.0 - At some point this should be made not-null
+	 */
+	@Column(name = "HASH_IDENTITY", nullable = true)
+	private Long myHashIdentity;
 
 	/**
 	 * Constructor
 	 */
 	public ResourceIndexedSearchParamUri() {
+		super();
 	}
 
 	/**
@@ -77,7 +85,11 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchParam
 	@PrePersist
 	public void calculateHashes() {
 		if (myHashUri == null) {
-			setHashUri(hash(getResourceType(), getParamName(), getUri()));
+			String resourceType = getResourceType();
+			String paramName = getParamName();
+			String uri = getUri();
+			setHashIdentity(calculateHashIdentity(resourceType, paramName));
+			setHashUri(calculateHashUri(resourceType, paramName, uri));
 		}
 	}
 
@@ -103,9 +115,18 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchParam
 		b.append(getResource(), obj.getResource());
 		b.append(getUri(), obj.getUri());
 		b.append(getHashUri(), obj.getHashUri());
+		b.append(getHashIdentity(), obj.getHashIdentity());
 		return b.isEquals();
 	}
 
+	private Long getHashIdentity() {
+		return myHashIdentity;
+	}
+
+	private void setHashIdentity(long theHashIdentity) {
+		myHashIdentity = theHashIdentity;
+	}
+
 	public Long getHashUri() {
 		calculateHashes();
 		return myHashUri;
@@ -153,4 +174,8 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchParam
 		return b.toString();
 	}
 
+	public static long calculateHashUri(String theResourceType, String theParamName, String theUri) {
+		return hash(theResourceType, theParamName, theUri);
+	}
+
 }
@@ -66,14 +66,12 @@ public class ResourceLink implements Serializable {
 
 	@ManyToOne(optional = false, fetch=FetchType.LAZY)
 	@JoinColumn(name = "SRC_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey=@ForeignKey(name="FK_RESLINK_SOURCE"))
-	// @ContainedIn()
 	private ResourceTable mySourceResource;
 
 	@Column(name = "SRC_RESOURCE_ID", insertable = false, updatable = false, nullable = false)
 	private Long mySourceResourcePid;
 
 	@Column(name = "SOURCE_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN)
-	@ColumnDefault("''") // TODO: remove this (it's only here for simplifying upgrades of 1.3 -> 1.4)
 	@Field()
 	private String mySourceResourceType;
 
@@ -86,7 +84,6 @@ public class ResourceLink implements Serializable {
 	private Long myTargetResourcePid;
 
 	@Column(name = "TARGET_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN)
-	@ColumnDefault("''") // TODO: remove this (it's only here for simplifying upgrades of 1.3 -> 1.4)
 	@Field()
 	private String myTargetResourceType;
 
@@ -0,0 +1,200 @@
+package ca.uhn.fhir.jpa.entity;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+import java.io.Serializable;
+import java.util.Date;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
+import javax.persistence.Id;
+import javax.persistence.Lob;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+
+import org.hibernate.annotations.Immutable;
+import org.hibernate.annotations.Subselect;
+
+import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.model.primitive.InstantDt;
+import ca.uhn.fhir.rest.api.Constants;
+
+//@formatter:off
+@Entity
+@Immutable
+@Subselect("SELECT h.pid as pid " +
+	", h.res_id as res_id " +
+	", h.res_type as res_type " +
+	", h.res_version as res_version " + // FHIR version
+	", h.res_ver as res_ver " + // resource version
+	", h.has_tags as has_tags " +
+	", h.res_deleted_at as res_deleted_at " +
+	", h.res_published as res_published " +
+	", h.res_updated as res_updated " +
+	", h.res_text as res_text " +
+	", h.res_encoding as res_encoding " +
+	", f.forced_id as forced_pid " +
+	"FROM HFJ_RES_VER h "
+	+ " LEFT OUTER JOIN HFJ_FORCED_ID f ON f.resource_pid = h.res_id "
+	+ " INNER JOIN HFJ_RESOURCE r ON r.res_id = h.res_id and r.res_ver = h.res_ver")
+// @formatter:on
+public class ResourceSearchView implements IBaseResourceEntity, Serializable {
+
+	private static final long serialVersionUID = 1L;
+
+	@Id
+	@Column(name = "PID")
+	private Long myId;
+
+	@Column(name = "RES_ID")
+	private Long myResourceId;
+
+	@Column(name = "RES_TYPE")
+	private String myResourceType;
+
+	@Column(name = "RES_VERSION")
+	@Enumerated(EnumType.STRING)
+	private FhirVersionEnum myFhirVersion;
+
+	@Column(name = "RES_VER")
+	private Long myResourceVersion;
+
+	@Column(name = "HAS_TAGS")
+	private boolean myHasTags;
+
+	@Column(name = "RES_DELETED_AT")
+	@Temporal(TemporalType.TIMESTAMP)
+	private Date myDeleted;
+
+	@Temporal(TemporalType.TIMESTAMP)
+	@Column(name = "RES_PUBLISHED")
+	private Date myPublished;
+
+	@Temporal(TemporalType.TIMESTAMP)
+	@Column(name = "RES_UPDATED")
+	private Date myUpdated;
+
+	@Column(name = "RES_TEXT")
+	@Lob()
+	private byte[] myResource;
+
+	@Column(name = "RES_ENCODING")
+	@Enumerated(EnumType.STRING)
+	private ResourceEncodingEnum myEncoding;
+
+	@Column(name = "forced_pid")
+	private String myForcedPid;
+
+	public ResourceSearchView() {
+	}
+
+	@Override
+	public Date getDeleted() {
+		return myDeleted;
+	}
+
+	public void setDeleted(Date theDate) {
+		myDeleted = theDate;
+	}
+
+	@Override
+	public FhirVersionEnum getFhirVersion() {
+		return myFhirVersion;
+	}
+
+	public void setFhirVersion(FhirVersionEnum theFhirVersion) {
+		myFhirVersion = theFhirVersion;
+	}
+
+	public String getForcedId() {
+		return myForcedPid;
+	}
+
+	@Override
+	public Long getId() {
+		return myResourceId;
+	}
+
+	@Override
+	public IdDt getIdDt() {
+		if (myForcedPid == null) {
+			Long id = myResourceId;
+			return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
+		} else {
+			return new IdDt(
+				getResourceType() + '/' + getForcedId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
+		}
+	}
+
+	@Override
+	public InstantDt getPublished() {
+		if (myPublished != null) {
+			return new InstantDt(myPublished);
+		} else {
+			return null;
+		}
+	}
+
+	public void setPublished(Date thePublished) {
+		myPublished = thePublished;
+	}
+
+	@Override
+	public Long getResourceId() {
+		return myResourceId;
+	}
+
+	@Override
+	public String getResourceType() {
+		return myResourceType;
+	}
+
+	@Override
+	public InstantDt getUpdated() {
+		return new InstantDt(myUpdated);
+	}
+
+	@Override
+	public Date getUpdatedDate() {
+		return myUpdated;
+	}
+
+	@Override
+	public long getVersion() {
+		return myResourceVersion;
+	}
+
+	@Override
+	public boolean isHasTags() {
+		return myHasTags;
+	}
+
+	public byte[] getResource() {
+		return myResource;
+	}
+
+	public ResourceEncodingEnum getEncoding() {
+		return myEncoding;
+	}
+
+}
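Since ResourceSearchView is @Immutable and backed by a @Subselect rather than a physical table, it is read like any other entity. A hypothetical Spring Data repository for it (illustrative only; the interface name, method, and query are not part of this diff):

    import java.util.Collection;
    import org.springframework.data.jpa.repository.JpaRepository;
    import org.springframework.data.jpa.repository.Query;
    import org.springframework.data.repository.query.Param;

    // Read-only access: saves against a @Subselect-backed entity would fail,
    // so only query methods make sense here
    public interface IResourceSearchViewDao extends JpaRepository<ResourceSearchView, Long> {

        @Query("SELECT v FROM ResourceSearchView v WHERE v.myResourceId IN (:pids)")
        Collection<ResourceSearchView> findByResourceIds(@Param("pids") Collection<Long> pids);
    }

The design choice is that one view row joins HFJ_RES_VER, HFJ_FORCED_ID, and HFJ_RESOURCE up front, so search result hydration needs a single query instead of per-resource joins.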
@@ -94,10 +94,9 @@ public class Search implements Serializable {
 	@OneToMany(mappedBy="mySearch")
 	private Collection<SearchResult> myResults;
 
-	// TODO: change nullable to false after 2.5
 	@NotNull
 	@Temporal(TemporalType.TIMESTAMP)
-	@Column(name="SEARCH_LAST_RETURNED", nullable=true, updatable=false)
+	@Column(name="SEARCH_LAST_RETURNED", nullable=false, updatable=false)
 	private Date mySearchLastReturned;
 
 	@Lob()
@ -1,59 +0,0 @@
|
||||||
package ca.uhn.fhir.jpa.entity;
|
|
||||||
|
|
||||||
/*-
|
|
||||||
* #%L
|
|
||||||
* HAPI FHIR JPA Server
|
|
||||||
* %%
|
|
||||||
* Copyright (C) 2014 - 2018 University Health Network
|
|
||||||
* %%
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
*
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
*
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
* #L%
|
|
||||||
*/
|
|
||||||
|
|
||||||
import javax.persistence.*;
|
|
||||||
|
|
||||||
@Entity
|
|
||||||
@Table(name = "HFJ_SEARCH_PARM", uniqueConstraints= {
|
|
||||||
@UniqueConstraint(name="IDX_SEARCHPARM_RESTYPE_SPNAME", columnNames= {"RES_TYPE", "PARAM_NAME"})
|
|
||||||
})
|
|
||||||
public class SearchParam {
|
|
||||||
|
|
||||||
@Id
|
|
||||||
@SequenceGenerator(name = "SEQ_SEARCHPARM_ID", sequenceName = "SEQ_SEARCHPARM_ID")
|
|
||||||
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SEARCHPARM_ID")
|
|
||||||
@Column(name = "PID")
|
|
||||||
private Long myId;
|
|
||||||
|
|
||||||
@Column(name="PARAM_NAME", length=BaseResourceIndexedSearchParam.MAX_SP_NAME, nullable=false, updatable=false)
|
|
||||||
private String myParamName;
|
|
||||||
|
|
||||||
@Column(name="RES_TYPE", length=ResourceTable.RESTYPE_LEN, nullable=false, updatable=false)
|
|
||||||
private String myResourceName;
|
|
||||||
|
|
||||||
public String getParamName() {
|
|
||||||
return myParamName;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setParamName(String theParamName) {
|
|
||||||
myParamName = theParamName;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setResourceName(String theResourceName) {
|
|
||||||
myResourceName = theResourceName;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Long getId() {
|
|
||||||
return myId;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
|
@@ -20,18 +20,16 @@ package ca.uhn.fhir.jpa.entity;
  * #L%
  */
 
-import java.io.Serializable;
-
-import javax.persistence.*;
-
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
 
+import javax.persistence.*;
+import java.io.Serializable;
+
 @Entity
 @Table(name = "HFJ_RES_PARAM_PRESENT", indexes = {
-	@Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID")
-}, uniqueConstraints = {
-	@UniqueConstraint(name = "IDX_RESPARMPRESENT_SPID_RESID", columnNames = { "SP_ID", "RES_ID" })
+	@Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID"),
+	@Index(name = "IDX_RESPARMPRESENT_HASHPRES", columnList = "HASH_PRESENCE")
 })
 public class SearchParamPresent implements Serializable {
 
@@ -42,17 +40,15 @@ public class SearchParamPresent implements Serializable {
 	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESPARMPRESENT_ID")
 	@Column(name = "PID")
 	private Long myId;
 
 	@Column(name = "SP_PRESENT", nullable = false)
 	private boolean myPresent;
 
 	@ManyToOne()
 	@JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESPARMPRES_RESID"))
 	private ResourceTable myResource;
-	@ManyToOne()
-	@JoinColumn(name = "SP_ID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESPARMPRES_SPID"))
-	private SearchParam mySearchParam;
+	@Transient
+	private transient String myParamName;
+	@Column(name = "HASH_PRESENCE")
+	private Long myHashPresence;
 
 	/**
 	 * Constructor
@@ -60,13 +56,40 @@ public class SearchParamPresent implements Serializable {
 	public SearchParamPresent() {
 		super();
 	}
 
+	@SuppressWarnings("unused")
+	@PrePersist
+	public void calculateHashes() {
+		if (myHashPresence == null) {
+			String resourceType = getResource().getResourceType();
+			String paramName = getParamName();
+			boolean present = myPresent;
+			setHashPresence(calculateHashPresence(resourceType, paramName, present));
+		}
+	}
+
+	public Long getHashPresence() {
+		return myHashPresence;
+	}
+
+	public void setHashPresence(Long theHashPresence) {
+		myHashPresence = theHashPresence;
+	}
+
+	public String getParamName() {
+		return myParamName;
+	}
+
+	public void setParamName(String theParamName) {
+		myParamName = theParamName;
+	}
+
 	public ResourceTable getResource() {
 		return myResource;
 	}
 
-	public SearchParam getSearchParam() {
-		return mySearchParam;
+	public void setResource(ResourceTable theResourceTable) {
+		myResource = theResourceTable;
 	}
 
 	public boolean isPresent() {
@@ -77,22 +100,18 @@ public class SearchParamPresent implements Serializable {
 		myPresent = thePresent;
 	}
 
-	public void setResource(ResourceTable theResourceTable) {
-		myResource = theResourceTable;
-	}
-
-	public void setSearchParam(SearchParam theSearchParam) {
-		mySearchParam = theSearchParam;
-	}
-
 	@Override
 	public String toString() {
 		ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
 
-		b.append("res_pid", myResource.getIdDt().toUnqualifiedVersionless().getValue());
-		b.append("param", mySearchParam.getParamName());
+		b.append("resPid", myResource.getIdDt().toUnqualifiedVersionless().getValue());
+		b.append("paramName", myParamName);
 		b.append("present", myPresent);
 		return b.build();
 	}
 
+	public static long calculateHashPresence(String theResourceType, String theParamName, boolean thePresent) {
+		return BaseResourceIndexedSearchParam.hash(theResourceType, theParamName, Boolean.toString(thePresent));
+	}
+
 }
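With HASH_PRESENCE in place, a ":missing" query can be answered by comparing one precomputed long instead of joining through the old HFJ_SEARCH_PARM table. For example:

    // Grounded in the code above: Patient.organization:missing=true matches
    // rows whose HASH_PRESENCE equals the hash of (Patient, organization, false)
    long hashAbsent = SearchParamPresent.calculateHashPresence("Patient", "organization", false);
    long hashPresent = SearchParamPresent.calculateHashPresence("Patient", "organization", true);
    // e.g. ... WHERE HASH_PRESENCE = :hashAbsent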
@@ -77,12 +77,12 @@ public class TermConcept implements Serializable {
 	})
 	private String myDisplay;
 
-	@OneToMany(mappedBy = "myConcept", orphanRemoval = true)
-	@Field
+	@OneToMany(mappedBy = "myConcept", orphanRemoval = false)
+	@Field(name = "PROPmyProperties", analyzer = @Analyzer(definition = "termConceptPropertyAnalyzer"))
 	@FieldBridge(impl = TermConceptPropertyFieldBridge.class)
 	private Collection<TermConceptProperty> myProperties;
 
-	@OneToMany(mappedBy = "myConcept", orphanRemoval = true)
+	@OneToMany(mappedBy = "myConcept", orphanRemoval = false)
 	private Collection<TermConceptDesignation> myDesignations;
 
 	@Id()
@@ -130,6 +130,7 @@ public class TermConcept implements Serializable {
 	public TermConceptDesignation addDesignation() {
 		TermConceptDesignation designation = new TermConceptDesignation();
 		designation.setConcept(this);
+		designation.setCodeSystemVersion(myCodeSystem);
 		getDesignations().add(designation);
 		return designation;
 	}
@@ -139,6 +140,7 @@ public class TermConcept implements Serializable {
 
 		TermConceptProperty property = new TermConceptProperty();
 		property.setConcept(this);
+		property.setCodeSystemVersion(myCodeSystem);
 		property.setType(thePropertyType);
 		property.setKey(thePropertyName);
 		property.setValue(thePropertyValue);
@@ -48,6 +48,14 @@ public class TermConceptDesignation implements Serializable {
 	private String myUseDisplay;
 	@Column(name = "VAL", length = 500, nullable = false)
 	private String myValue;
+	/**
+	 * TODO: Make this non-null
+	 *
+	 * @since 3.5.0
+	 */
+	@ManyToOne
+	@JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTDESIG_CSV"))
+	private TermCodeSystemVersion myCodeSystemVersion;
 
 	public String getLanguage() {
 		return myLanguage;
@@ -94,6 +102,11 @@ public class TermConceptDesignation implements Serializable {
 		return this;
 	}
 
+	public TermConceptDesignation setCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
+		myCodeSystemVersion = theCodeSystemVersion;
+		return this;
+	}
+
 	public TermConceptDesignation setConcept(TermConcept theConcept) {
 		myConcept = theConcept;
 		return this;
@@ -38,6 +38,14 @@ public class TermConceptProperty implements Serializable {
 	@ManyToOne
 	@JoinColumn(name = "CONCEPT_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CONCEPT"))
 	private TermConcept myConcept;
+	/**
+	 * TODO: Make this non-null
+	 *
+	 * @since 3.5.0
+	 */
+	@ManyToOne
+	@JoinColumn(name = "CS_VER_PID", nullable = true, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPTPROP_CSV"))
+	private TermCodeSystemVersion myCodeSystemVersion;
 	@Id()
 	@SequenceGenerator(name = "SEQ_CONCEPT_PROP_PID", sequenceName = "SEQ_CONCEPT_PROP_PID")
 	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PROP_PID")
@@ -124,6 +132,11 @@ public class TermConceptProperty implements Serializable {
 		myValue = theValue;
 	}
 
+	public TermConceptProperty setCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
+		myCodeSystemVersion = theCodeSystemVersion;
+		return this;
+	}
+
 	public void setConcept(TermConcept theConcept) {
 		myConcept = theConcept;
 	}
@@ -21,18 +21,22 @@ package ca.uhn.fhir.jpa.entity;
  */
 
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
 import org.hibernate.search.bridge.FieldBridge;
 import org.hibernate.search.bridge.LuceneOptions;
 import org.hibernate.search.bridge.StringBridge;
 
 import java.util.Collection;
 
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
 /**
  * Allows hibernate search to index individual concepts' properties
 */
 public class TermConceptPropertyFieldBridge implements FieldBridge, StringBridge {
 
-	public static final String PROP_PREFIX = "PROP__";
+	public static final String CONCEPT_FIELD_PROPERTY_PREFIX = "PROP";
 
 	/**
	 * Constructor
@@ -48,15 +52,17 @@ public class TermConceptPropertyFieldBridge implements FieldBridge, StringBridge
 
 	@Override
 	public void set(String theName, Object theValue, Document theDocument, LuceneOptions theLuceneOptions) {
+		@SuppressWarnings("unchecked")
 		Collection<TermConceptProperty> properties = (Collection<TermConceptProperty>) theValue;
 
 		if (properties != null) {
 			for (TermConceptProperty next : properties) {
-				String propValue = next.getKey() + "=" + next.getValue();
-				theLuceneOptions.addFieldToDocument(theName, propValue, theDocument);
+				theDocument.add(new StringField(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getValue(), Field.Store.YES));
 
 				if (next.getType() == TermConceptPropertyTypeEnum.CODING) {
-					propValue = next.getKey() + "=" + next.getDisplay();
-					theLuceneOptions.addFieldToDocument(theName, propValue, theDocument);
+					if (isNotBlank(next.getDisplay())) {
+						theDocument.add(new StringField(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay(), Field.Store.YES));
+					}
 				}
 			}
 		}
 	}
@@ -66,7 +66,8 @@ public class LuceneSearchMappingFactory {
 			.analyzerDef("standardAnalyzer", StandardTokenizerFactory.class)
 			.filter(LowerCaseFilterFactory.class)
 			.analyzerDef("exactAnalyzer", StandardTokenizerFactory.class)
-			.analyzerDef("conceptParentPidsAnalyzer", WhitespaceTokenizerFactory.class);
+			.analyzerDef("conceptParentPidsAnalyzer", WhitespaceTokenizerFactory.class)
+			.analyzerDef("termConceptPropertyAnalyzer", WhitespaceTokenizerFactory.class);
 
 		return mapping;
 	}
@@ -178,9 +178,9 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
 	@Override
 	public List<Long> doInTransaction(TransactionStatus theStatus) {
 		final List<Long> resultPids = new ArrayList<Long>();
-		Page<SearchResult> searchResults = mySearchResultDao.findWithSearchUuid(foundSearch, page);
-		for (SearchResult next : searchResults) {
-			resultPids.add(next.getResourcePid());
+		Page<Long> searchResultPids = mySearchResultDao.findWithSearchUuid(foundSearch, page);
+		for (Long next : searchResultPids) {
+			resultPids.add(next);
 		}
 		return resultPids;
 	}
@@ -20,14 +20,12 @@ package ca.uhn.fhir.jpa.sp;
  * #L%
 */
 
-import java.util.Map;
-
 import ca.uhn.fhir.jpa.entity.ResourceTable;
 
+import java.util.Map;
+
 public interface ISearchParamPresenceSvc {
 
 	void updatePresence(ResourceTable theResource, Map<String, Boolean> theParamNameToPresence);
 
-	void flushCachesForUnitTest();
-
 }
@@ -20,29 +20,17 @@ package ca.uhn.fhir.jpa.sp;
  * #L%
  */
 
-import java.util.*;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-
 import ca.uhn.fhir.jpa.dao.DaoConfig;
-import org.apache.commons.lang3.tuple.Pair;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import ca.uhn.fhir.jpa.dao.data.ISearchParamDao;
 import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
 import ca.uhn.fhir.jpa.entity.ResourceTable;
-import ca.uhn.fhir.jpa.entity.SearchParam;
 import ca.uhn.fhir.jpa.entity.SearchParamPresent;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.*;
+import java.util.Map.Entry;
 
 public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
 
-	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchParamPresenceSvcImpl.class);
-
-	private Map<Pair<String, String>, SearchParam> myResourceTypeToSearchParamToEntity = new ConcurrentHashMap<Pair<String, String>, SearchParam>();
-
-	@Autowired
-	private ISearchParamDao mySearchParamDao;
-
 	@Autowired
 	private ISearchParamPresentDao mySearchParamPresentDao;
 
@@ -55,62 +43,48 @@ public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
 			return;
 		}
 
-		Map<String, Boolean> presenceMap = new HashMap<String, Boolean>(theParamNameToPresence);
-		List<SearchParamPresent> entitiesToSave = new ArrayList<SearchParamPresent>();
-		List<SearchParamPresent> entitiesToDelete = new ArrayList<SearchParamPresent>();
+		Map<String, Boolean> presenceMap = new HashMap<>(theParamNameToPresence);
 
+		// Find existing entries
 		Collection<SearchParamPresent> existing;
 		existing = mySearchParamPresentDao.findAllForResource(theResource);
+		Map<Long, SearchParamPresent> existingHashToPresence = new HashMap<>();
 		for (SearchParamPresent nextExistingEntity : existing) {
-			String nextSearchParamName = nextExistingEntity.getSearchParam().getParamName();
-			Boolean existingValue = presenceMap.remove(nextSearchParamName);
-			if (existingValue == null) {
-				entitiesToDelete.add(nextExistingEntity);
-			} else if (existingValue.booleanValue() == nextExistingEntity.isPresent()) {
-				ourLog.trace("No change for search param {}", nextSearchParamName);
-			} else {
-				nextExistingEntity.setPresent(existingValue);
-				entitiesToSave.add(nextExistingEntity);
-			}
+			existingHashToPresence.put(nextExistingEntity.getHashPresence(), nextExistingEntity);
 		}
 
+		// Find newly wanted set of entries
+		Map<Long, SearchParamPresent> newHashToPresence = new HashMap<>();
 		for (Entry<String, Boolean> next : presenceMap.entrySet()) {
-			String resourceType = theResource.getResourceType();
 			String paramName = next.getKey();
-			Pair<String, String> key = Pair.of(resourceType, paramName);
-
-			SearchParam searchParam = myResourceTypeToSearchParamToEntity.get(key);
-			if (searchParam == null) {
-				searchParam = mySearchParamDao.findForResource(resourceType, paramName);
-				if (searchParam != null) {
-					myResourceTypeToSearchParamToEntity.put(key, searchParam);
-				} else {
-					searchParam = new SearchParam();
-					searchParam.setResourceName(resourceType);
-					searchParam.setParamName(paramName);
-					searchParam = mySearchParamDao.save(searchParam);
-					ourLog.info("Added search param {} with pid {}", paramName, searchParam.getId());
-					// Don't add the newly saved entity to the map in case the save fails
-				}
-			}
 
 			SearchParamPresent present = new SearchParamPresent();
 			present.setResource(theResource);
-			present.setSearchParam(searchParam);
+			present.setParamName(paramName);
 			present.setPresent(next.getValue());
-			entitiesToSave.add(present);
+			present.calculateHashes();
 
+			newHashToPresence.put(present.getHashPresence(), present);
 		}
 
-		mySearchParamPresentDao.deleteInBatch(entitiesToDelete);
-		mySearchParamPresentDao.saveAll(entitiesToSave);
+		// Delete any that should be deleted
+		List<SearchParamPresent> toDelete = new ArrayList<>();
+		for (Entry<Long, SearchParamPresent> nextEntry : existingHashToPresence.entrySet()) {
+			if (newHashToPresence.containsKey(nextEntry.getKey()) == false) {
+				toDelete.add(nextEntry.getValue());
+			}
+		}
+		mySearchParamPresentDao.deleteInBatch(toDelete);
 
+		// Add any that should be added
+		List<SearchParamPresent> toAdd = new ArrayList<>();
+		for (Entry<Long, SearchParamPresent> nextEntry : newHashToPresence.entrySet()) {
+			if (existingHashToPresence.containsKey(nextEntry.getKey()) == false) {
+				toAdd.add(nextEntry.getValue());
+			}
+		}
+		mySearchParamPresentDao.saveAll(toAdd);
 
-	}
-
-	@Override
-	public void flushCachesForUnitTest() {
-		myResourceTypeToSearchParamToEntity.clear();
 	}
 
 }
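The updated updatePresence() reduces to a keyed set-difference: index what exists and what is wanted by a hash, delete the keys that vanished, insert the keys that are new, and leave unchanged rows untouched. A minimal, self-contained sketch of that pattern with plain maps; the generic names below are illustrative stand-ins, not the HAPI FHIR entities or DAOs:

import java.util.*;

class SetDifferenceReconciler {
	// Compute rows to delete (existing but no longer wanted) and rows to add
	// (wanted but not yet existing); matching keys are left alone, which makes
	// the operation idempotent for an unchanged resource.
	static <K, V> void reconcile(Map<K, V> existing, Map<K, V> wanted,
			List<V> toDelete, List<V> toAdd) {
		for (Map.Entry<K, V> e : existing.entrySet()) {
			if (!wanted.containsKey(e.getKey())) {
				toDelete.add(e.getValue());
			}
		}
		for (Map.Entry<K, V> e : wanted.entrySet()) {
			if (!existing.containsKey(e.getKey())) {
				toAdd.add(e.getValue());
			}
		}
	}

	public static void main(String[] args) {
		Map<Long, String> existing = new HashMap<>(Map.of(1L, "name:true", 2L, "name:false"));
		Map<Long, String> wanted = new HashMap<>(Map.of(2L, "name:false", 3L, "date:true"));
		List<String> toDelete = new ArrayList<>(), toAdd = new ArrayList<>();
		reconcile(existing, wanted, toDelete, toAdd);
		System.out.println(toDelete + " / " + toAdd); // [name:true] / [date:true]
	}
}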
@@ -40,9 +40,13 @@ import com.github.benmanes.caffeine.cache.Caffeine;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.ArrayListMultimap;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.time.DateUtils;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queries.TermsQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.RegexpQuery;
 import org.hibernate.ScrollMode;
 import org.hibernate.ScrollableResults;
 import org.hibernate.search.jpa.FullTextEntityManager;
@@ -61,6 +65,7 @@ import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
 import org.springframework.data.domain.Page;
 import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Slice;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.transaction.PlatformTransactionManager;
 import org.springframework.transaction.TransactionStatus;
@@ -131,10 +136,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 	private int myFetchSize = DEFAULT_FETCH_SIZE;
 	private ApplicationContext myApplicationContext;
 
-	private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept) {
-		if (theAddedCodes.add(theConcept.getCode())) {
+	/**
+	 * @param theAdd If true, add the code. If false, remove the code.
+	 */
+	private void addCodeIfNotAlreadyAdded(String theCodeSystem, ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, TermConcept theConcept, boolean theAdd) {
+		String code = theConcept.getCode();
+		if (theAdd && theAddedCodes.add(code)) {
 			ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
-			contains.setCode(theConcept.getCode());
+			contains.setCode(code);
 			contains.setSystem(theCodeSystem);
 			contains.setDisplay(theConcept.getDisplay());
 			for (TermConceptDesignation nextDesignation : theConcept.getDesignations()) {
@@ -147,18 +156,24 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 					.setDisplay(nextDesignation.getUseDisplay());
 			}
 		}
+
+		if (!theAdd && theAddedCodes.remove(code)) {
+			removeCodeFromExpansion(theCodeSystem, code, theExpansionComponent);
+		}
 	}
 
-	private void addConceptsToList(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, String theSystem, List<CodeSystem.ConceptDefinitionComponent> theConcept) {
+	private void addConceptsToList(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, String theSystem, List<CodeSystem.ConceptDefinitionComponent> theConcept, boolean theAdd) {
 		for (CodeSystem.ConceptDefinitionComponent next : theConcept) {
-			if (!theAddedCodes.contains(next.getCode())) {
-				theAddedCodes.add(next.getCode());
+			if (theAdd && theAddedCodes.add(next.getCode())) {
 				ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
 				contains.setCode(next.getCode());
 				contains.setSystem(theSystem);
 				contains.setDisplay(next.getDisplay());
 			}
-			addConceptsToList(theExpansionComponent, theAddedCodes, theSystem, next.getConcept());
+			if (!theAdd && theAddedCodes.remove(next.getCode())) {
+				removeCodeFromExpansion(theSystem, next.getCode(), theExpansionComponent);
+			}
+			addConceptsToList(theExpansionComponent, theAddedCodes, theSystem, next.getConcept(), theAdd);
 		}
 	}
 
@@ -250,21 +265,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 
 		int i = 0;
 		for (TermCodeSystemVersion next : myCodeSystemVersionDao.findByCodeSystemResource(theCodeSystem.getPid())) {
-			myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
-			for (TermConcept nextConcept : myConceptDao.findByCodeSystemVersion(next.getPid())) {
-				myConceptPropertyDao.deleteAll(nextConcept.getProperties());
-				myConceptDesignationDao.deleteAll(nextConcept.getDesignations());
-				myConceptDao.delete(nextConcept);
-			}
-			if (next.getCodeSystem().getCurrentVersion() == next) {
-				next.getCodeSystem().setCurrentVersion(null);
-				myCodeSystemDao.save(next.getCodeSystem());
-			}
-			myCodeSystemVersionDao.delete(next);
-
-			if (i++ % 1000 == 0) {
-				myEntityManager.flush();
-			}
+			deleteCodeSystemVersion(next.getPid());
 		}
 		myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
 		myCodeSystemDao.delete(theCodeSystem);
@@ -272,6 +273,130 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		myEntityManager.flush();
 	}
 
+	public void deleteCodeSystemVersion(Long theCodeSystemVersionPid) {
+		ourLog.info(" * Deleting code system version {}", theCodeSystemVersionPid);
+
+		PageRequest page = PageRequest.of(0, 1000);
+		int count;
+
+		// Parent/Child links
+		ourLog.info(" * Deleting parent/child links");
+		count = 0;
+		while (true) {
+			Slice<TermConceptParentChildLink> link = myConceptParentChildLinkDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
+			if (link.hasContent() == false) {
+				break;
+			}
+
+			myConceptParentChildLinkDao.deleteInBatch(link);
+
+			count += link.getNumberOfElements();
+			ourLog.info(" * {} parent/child links deleted", count);
+		}
+		myConceptParentChildLinkDao.flush();
+
+		// Properties
+		ourLog.info(" * Deleting properties");
+		count = 0;
+		while (true) {
+			Slice<TermConceptProperty> link = myConceptPropertyDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
+			if (link.hasContent() == false) {
+				break;
+			}
+
+			myConceptPropertyDao.deleteInBatch(link);
+
+			count += link.getNumberOfElements();
+			ourLog.info(" * {} concept properties deleted", count);
+		}
+		myConceptPropertyDao.flush();
+
+		// Designations
+		ourLog.info(" * Deleting designations");
+		count = 0;
+		while (true) {
+			Slice<TermConceptDesignation> link = myConceptDesignationDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
+			if (link.hasContent() == false) {
+				break;
+			}
+
+			myConceptDesignationDao.deleteInBatch(link);
+
+			count += link.getNumberOfElements();
+			ourLog.info(" * {} concept designations deleted", count);
+		}
+		myConceptDesignationDao.flush();
+
+		// Concepts
+		ourLog.info(" * Deleting concepts");
+		count = 0;
+		while (true) {
+			Slice<TermConcept> link = myConceptDao.findByCodeSystemVersion(page, theCodeSystemVersionPid);
+			if (link.hasContent() == false) {
+				break;
+			}
+
+			myConceptDao.deleteInBatch(link);
+			myConceptDao.flush();
+
+			count += link.getNumberOfElements();
+			ourLog.info(" * {} concepts deleted", count);
+		}
+
+		Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
+		if (codeSystemOpt.isPresent()) {
+			TermCodeSystem codeSystem = codeSystemOpt.get();
+			ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
+			codeSystem.setCurrentVersion(null);
+			myCodeSystemDao.save(codeSystem);
+		}
+
+		ourLog.info(" * Deleting code system version");
+		myCodeSystemVersionDao.deleteById(theCodeSystemVersionPid);
+
+	}
+
+	public void deleteConceptMap(ResourceTable theResourceTable) {
+		// Get existing entity so it can be deleted.
+		Optional<TermConceptMap> optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
+
+		if (optionalExistingTermConceptMapById.isPresent()) {
+			TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
+
+			ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId());
+			for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
+
+				for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
+
+					for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
+
+						myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
+					}
+
+					myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
+				}
+
+				myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
+			}
+
+			myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
+			ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId());
+
+			ourLog.info("Flushing...");
+			myConceptMapGroupElementTargetDao.flush();
+			myConceptMapGroupElementDao.flush();
+			myConceptMapGroupDao.flush();
+			myConceptMapDao.flush();
+			ourLog.info("Done flushing.");
+		}
+	}
+
+	@Override
+	@Transactional
+	public void deleteConceptMapAndChildren(ResourceTable theResourceTable) {
+		deleteConceptMap(theResourceTable);
+	}
+
 	private int ensureParentsSaved(Collection<TermConceptParentChildLink> theParents) {
 		ourLog.trace("Checking {} parents", theParents.size());
 		int retVal = 0;
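deleteCodeSystemVersion() above drains each child table in fixed-size slices rather than loading every row at once or issuing one enormous delete. The control flow, reduced to plain collections (a List stands in for the repository, and a slice size of 3 keeps the output short; this is a sketch of the pattern, not the real DAO API):

import java.util.*;

class PagedDeleteDemo {
	public static void main(String[] args) {
		List<Integer> table = new ArrayList<>(List.of(1, 2, 3, 4, 5, 6, 7));
		int pageSize = 3;
		int count = 0;
		while (true) {
			// Fetch at most one page of rows; stands in for findByCodeSystemVersion(page, pid)
			List<Integer> slice = table.subList(0, Math.min(pageSize, table.size()));
			if (slice.isEmpty()) {
				break; // nothing left: done
			}
			count += slice.size();
			slice.clear(); // stands in for deleteInBatch(slice); removes from the backing list
			System.out.println(count + " rows deleted");
		}
	}
}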
@@ -296,133 +421,17 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 	public ValueSet expandValueSet(ValueSet theValueSetToExpand) {
 		ValueSet.ValueSetExpansionComponent expansionComponent = new ValueSet.ValueSetExpansionComponent();
 		Set<String> addedCodes = new HashSet<>();
-		boolean haveIncludeCriteria = false;
 
+		// Handle includes
 		for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getInclude()) {
-			String system = include.getSystem();
-			if (isNotBlank(system)) {
-				ourLog.info("Starting expansion around code system: {}", system);
-
-				TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
-				if (cs != null) {
-					TermCodeSystemVersion csv = cs.getCurrentVersion();
-
-					/*
-					 * Include Concepts
-					 */
-					for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
-						String nextCode = next.getCode();
-						if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
-							haveIncludeCriteria = true;
-							TermConcept code = findCode(system, nextCode);
-							if (code != null) {
-								addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, code);
-							}
-						}
-					}
-
-					/*
-					 * Filters
-					 */
-
-					if (include.getFilter().size() > 0) {
-						haveIncludeCriteria = true;
-
-						FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
-						QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
-						BooleanJunction<?> bool = qb.bool();
-
-						bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());
-
-						for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) {
-							if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
-								continue;
-							}
-
-							if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
-								throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
-							}
-
-							if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
-								addDisplayFilterExact(qb, bool, nextFilter);
-							} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
-								if (nextFilter.getValue().trim().contains(" ")) {
-									addDisplayFilterExact(qb, bool, nextFilter);
-								} else {
-									addDisplayFilterInexact(qb, bool, nextFilter);
-								}
-							} else if ((nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) && nextFilter.getOp() == ValueSet.FilterOperator.ISA) {
-
-								TermConcept code = findCode(system, nextFilter.getValue());
-								if (code == null) {
-									throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
-								}
-
-								ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
-								bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
-
-							} else {
-
-								bool.must(qb.phrase().onField("myProperties").sentence(nextFilter.getProperty() + "=" + nextFilter.getValue()).createQuery());
-
-							}
-						}
-
-						Query luceneQuery = bool.createQuery();
-						FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
-						jpaQuery.setMaxResults(1000);
-
-						StopWatch sw = new StopWatch();
-
-						@SuppressWarnings("unchecked")
-						List<TermConcept> result = jpaQuery.getResultList();
-
-						ourLog.info("Expansion completed in {}ms", sw.getMillis());
-
-						for (TermConcept nextConcept : result) {
-							addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
-						}
-
-						expansionComponent.setTotal(jpaQuery.getResultSize());
-					}
-
-					if (!haveIncludeCriteria) {
-						List<TermConcept> allCodes = findCodes(system);
-						for (TermConcept nextConcept : allCodes) {
-							addCodeIfNotAlreadyAdded(system, expansionComponent, addedCodes, nextConcept);
-						}
-					}
-
-				} else {
-					// No codesystem matching the URL found in the database
-
-					CodeSystem codeSystemFromContext = getCodeSystemFromContext(system);
-					if (codeSystemFromContext == null) {
-						throw new InvalidRequestException("Unknown code system: " + system);
-					}
-
-					if (include.getConcept().isEmpty() == false) {
-						for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
-							String nextCode = next.getCode();
-							if (isNotBlank(nextCode) && !addedCodes.contains(nextCode)) {
-								CodeSystem.ConceptDefinitionComponent code = findCode(codeSystemFromContext.getConcept(), nextCode);
-								if (code != null) {
-									addedCodes.add(nextCode);
-									ValueSet.ValueSetExpansionContainsComponent contains = expansionComponent.addContains();
-									contains.setCode(nextCode);
-									contains.setSystem(system);
-									contains.setDisplay(code.getDisplay());
-								}
-							}
-						}
-					} else {
-						List<CodeSystem.ConceptDefinitionComponent> concept = codeSystemFromContext.getConcept();
-						addConceptsToList(expansionComponent, addedCodes, system, concept);
-					}
-
-				}
-			}
+			boolean add = true;
+			expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add);
+		}
+
+		// Handle excludes
+		for (ValueSet.ConceptSetComponent include : theValueSetToExpand.getCompose().getExclude()) {
+			boolean add = false;
+			expandValueSetHandleIncludeOrExclude(expansionComponent, addedCodes, include, add);
 		}
 
 		ValueSet valueSet = new ValueSet();
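After this change, expandValueSet() is simply the ValueSet compose walked twice through one routine: includes with add=true, then excludes with add=false, so an exclude can retract a code that an earlier include produced. The same shape with plain collections (a List of codes stands in for the expansion component; illustrative only):

import java.util.*;

class IncludeExcludeDemo {
	static void handle(Set<String> seen, List<String> expansion, String code, boolean add) {
		if (add && seen.add(code)) {
			expansion.add(code);              // first include of a code wins once
		}
		if (!add && seen.remove(code)) {
			expansion.removeIf(code::equals); // a later exclude retracts it
		}
	}

	public static void main(String[] args) {
		Set<String> seen = new HashSet<>();
		List<String> expansion = new ArrayList<>();
		handle(seen, expansion, "8867-4", true);
		handle(seen, expansion, "718-7", true);
		handle(seen, expansion, "8867-4", false);
		System.out.println(expansion); // [718-7]
	}
}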
@@ -443,6 +452,162 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		return retVal;
 	}
 
+	public void expandValueSetHandleIncludeOrExclude(ValueSet.ValueSetExpansionComponent theExpansionComponent, Set<String> theAddedCodes, ValueSet.ConceptSetComponent include, boolean theAdd) {
+		String system = include.getSystem();
+		if (isNotBlank(system)) {
+			ourLog.info("Starting expansion around code system: {}", system);
+
+			TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(system);
+			if (cs != null) {
+				TermCodeSystemVersion csv = cs.getCurrentVersion();
+				FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
+				QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
+				BooleanJunction<?> bool = qb.bool();
+
+				bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());
+
+				/*
+				 * Include Concepts
+				 */
+
+				String codes = include
+					.getConcept()
+					.stream()
+					.filter(Objects::nonNull)
+					.map(ValueSet.ConceptReferenceComponent::getCode)
+					.filter(StringUtils::isNotBlank)
+					.collect(Collectors.joining(" "));
+				if (isNotBlank(codes)) {
+					bool.must(qb.keyword().onField("myCode").matching(codes).createQuery());
+				}
+
+				/*
+				 * Filters
+				 */
+
+				if (include.getFilter().size() > 0) {
+
+					for (ValueSet.ConceptSetFilterComponent nextFilter : include.getFilter()) {
+						if (isBlank(nextFilter.getValue()) && nextFilter.getOp() == null && isBlank(nextFilter.getProperty())) {
+							continue;
+						}
+
+						if (isBlank(nextFilter.getValue()) || nextFilter.getOp() == null || isBlank(nextFilter.getProperty())) {
+							throw new InvalidRequestException("Invalid filter, must have fields populated: property op value");
+						}
+
+						if (nextFilter.getProperty().equals("display:exact") && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
+							addDisplayFilterExact(qb, bool, nextFilter);
+						} else if ("display".equals(nextFilter.getProperty()) && nextFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
+							if (nextFilter.getValue().trim().contains(" ")) {
+								addDisplayFilterExact(qb, bool, nextFilter);
+							} else {
+								addDisplayFilterInexact(qb, bool, nextFilter);
+							}
+						} else if (nextFilter.getProperty().equals("concept") || nextFilter.getProperty().equals("code")) {
+
+							TermConcept code = findCode(system, nextFilter.getValue());
+							if (code == null) {
+								throw new InvalidRequestException("Invalid filter criteria - code does not exist: {" + system + "}" + nextFilter.getValue());
+							}
+
+							if (nextFilter.getOp() == ValueSet.FilterOperator.ISA) {
+								ourLog.info(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
+								bool.must(qb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
+							} else {
+								throw new InvalidRequestException("Don't know how to handle op=" + nextFilter.getOp() + " on property " + nextFilter.getProperty());
+							}
+
+						} else {
+
+							if (nextFilter.getOp() == ValueSet.FilterOperator.REGEX) {
+
+								/*
+								 * We treat the regex filter as a match on the regex
+								 * anywhere in the property string. The spec does not
+								 * say whether or not this is the right behaviour, but
+								 * there are examples that seem to suggest that it is.
+								 */
+								String value = nextFilter.getValue();
+								if (value.endsWith("$")) {
+									value = value.substring(0, value.length() - 1);
+								} else if (value.endsWith(".*") == false) {
+									value = value + ".*";
+								}
+								if (value.startsWith("^") == false && value.startsWith(".*") == false) {
+									value = ".*" + value;
+								} else if (value.startsWith("^")) {
+									value = value.substring(1);
+								}
+
+								Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + nextFilter.getProperty(), value);
+								RegexpQuery query = new RegexpQuery(term);
+								bool.must(query);
+
+							} else {
+
+								String value = nextFilter.getValue();
+								Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + nextFilter.getProperty(), value);
+								bool.must(new TermsQuery(term));
+
+							}
+
+						}
+					}
+
+				}
+
+				Query luceneQuery = bool.createQuery();
+				FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
+				jpaQuery.setMaxResults(1000);
+
+				StopWatch sw = new StopWatch();
+
+				@SuppressWarnings("unchecked")
+				List<TermConcept> result = jpaQuery.getResultList();
+
+				ourLog.info("Expansion completed in {}ms", sw.getMillis());
+
+				for (TermConcept nextConcept : result) {
+					addCodeIfNotAlreadyAdded(system, theExpansionComponent, theAddedCodes, nextConcept, theAdd);
+				}
+
+			} else {
+				// No codesystem matching the URL found in the database
+
+				CodeSystem codeSystemFromContext = getCodeSystemFromContext(system);
+				if (codeSystemFromContext == null) {
+					throw new InvalidRequestException("Unknown code system: " + system);
+				}
+
+				if (include.getConcept().isEmpty() == false) {
+					for (ValueSet.ConceptReferenceComponent next : include.getConcept()) {
+						String nextCode = next.getCode();
+						if (isNotBlank(nextCode) && !theAddedCodes.contains(nextCode)) {
+							CodeSystem.ConceptDefinitionComponent code = findCode(codeSystemFromContext.getConcept(), nextCode);
+							if (code != null) {
+								if (theAdd && theAddedCodes.add(nextCode)) {
+									ValueSet.ValueSetExpansionContainsComponent contains = theExpansionComponent.addContains();
+									contains.setCode(nextCode);
+									contains.setSystem(system);
+									contains.setDisplay(code.getDisplay());
+								}
+								if (!theAdd && theAddedCodes.remove(nextCode)) {
+									removeCodeFromExpansion(system, nextCode, theExpansionComponent);
+								}
+							}
+						}
+					}
+				} else {
+					List<CodeSystem.ConceptDefinitionComponent> concept = codeSystemFromContext.getConcept();
+					addConceptsToList(theExpansionComponent, theAddedCodes, system, concept, theAdd);
+				}
+
+			}
+		}
+	}
+
 	private void fetchChildren(TermConcept theConcept, Set<TermConcept> theSetToPopulate) {
 		for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
 			TermConcept nextChild = nextChildLink.getChild();
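The REGEX filter branch above adapts conventionally anchored regexes to Lucene's RegexpQuery, which matches patterns against the whole term: explicit ^/$ anchors are stripped, and unanchored ends are padded with .*. That normalization in isolation, as a standalone copy of the logic for illustration:

class RegexAnchorDemo {
	static String normalize(String value) {
		if (value.endsWith("$")) {
			value = value.substring(0, value.length() - 1); // drop trailing anchor
		} else if (!value.endsWith(".*")) {
			value = value + ".*";                           // pad unanchored tail
		}
		if (!value.startsWith("^") && !value.startsWith(".*")) {
			value = ".*" + value;                           // pad unanchored head
		} else if (value.startsWith("^")) {
			value = value.substring(1);                     // drop leading anchor
		}
		return value;
	}

	public static void main(String[] args) {
		System.out.println(normalize("^LP101")); // LP101.*
		System.out.println(normalize("101$"));   // .*101
		System.out.println(normalize("LP"));     // .*LP.*
	}
}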
@@ -745,6 +910,14 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 
 	}
 
+	private void removeCodeFromExpansion(String theCodeSystem, String theCode, ValueSet.ValueSetExpansionComponent theExpansionComponent) {
+		theExpansionComponent
+			.getContains()
+			.removeIf(t ->
+				theCodeSystem.equals(t.getSystem()) &&
+					theCode.equals(t.getCode()));
+	}
+
 	private int saveConcept(TermConcept theConcept) {
 		int retVal = 0;
 
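removeCodeFromExpansion() above relies on List.removeIf with a compound predicate: drop exactly the entries whose system and code both match. The same idea in plain form (illustrative, using a tiny record instead of the FHIR component type):

import java.util.*;

class RemoveIfDemo {
	record Contains(String system, String code) {}

	public static void main(String[] args) {
		List<Contains> contains = new ArrayList<>(List.of(
			new Contains("http://loinc.org", "8867-4"),
			new Contains("http://loinc.org", "718-7")));
		// Remove only the entry matching both system and code
		contains.removeIf(t ->
			"http://loinc.org".equals(t.system()) &&
				"8867-4".equals(t.code()));
		System.out.println(contains); // [Contains[system=http://loinc.org, code=718-7]]
	}
}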
@@ -788,15 +961,16 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 			return;
 		} else if (myDeferredConcepts.isEmpty() && myConceptLinksToSaveLater.isEmpty()) {
 			processReindexing();
-			return;
 		}
 
 		TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
 		tt.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRES_NEW);
-		tt.execute(t -> {
-			processDeferredConcepts();
-			return null;
-		});
+		if (!myDeferredConcepts.isEmpty() || !myConceptLinksToSaveLater.isEmpty()) {
+			tt.execute(t -> {
+				processDeferredConcepts();
+				return null;
+			});
+		}
 
 		if (myDeferredValueSets.size() > 0) {
 			tt.execute(t -> {
@@ -847,20 +1021,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 
 		ourLog.info("Deleting old code system versions");
 		for (TermCodeSystemVersion next : existing) {
-			ourLog.info(" * Deleting code system version {}", next.getPid());
-			myConceptParentChildLinkDao.deleteByCodeSystemVersion(next.getPid());
-			for (TermConcept nextConcept : myConceptDao.findByCodeSystemVersion(next.getPid())) {
-				myConceptPropertyDao.deleteAll(nextConcept.getProperties());
-				myConceptDao.delete(nextConcept);
-			}
+			Long codeSystemVersionPid = next.getPid();
+			deleteCodeSystemVersion(codeSystemVersionPid);
 		}
 
 		ourLog.info("Flushing...");
-		myConceptParentChildLinkDao.flush();
-		myConceptPropertyDao.flush();
 		myConceptDao.flush();
 
 		ourLog.info("Done flushing");
 
 		/*
@@ -905,7 +1071,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		codeSystem.setCurrentVersion(theCodeSystemVersion);
 		codeSystem = myCodeSystemDao.saveAndFlush(codeSystem);
 
-		ourLog.info("Setting codesystemversion on {} concepts...", totalCodeCount);
+		ourLog.info("Setting CodeSystemVersion[{}] on {} concepts...", codeSystem.getPid(), totalCodeCount);
 
 		for (TermConcept next : theCodeSystemVersion.getConcepts()) {
 			populateVersion(next, codeSystemVersion);
@@ -963,42 +1129,10 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 		termConceptMap.setResource(theResourceTable);
 		termConceptMap.setUrl(theConceptMap.getUrl());
 
-		// Get existing entity so it can be deleted.
-		Optional<TermConceptMap> optionalExistingTermConceptMapById = myConceptMapDao.findTermConceptMapByResourcePid(theResourceTable.getId());
-
 		/*
 		 * For now we always delete old versions. At some point, it would be nice to allow configuration to keep old versions.
 		 */
+		deleteConceptMap(theResourceTable);
 
-		if (optionalExistingTermConceptMapById.isPresent()) {
-			TermConceptMap existingTermConceptMap = optionalExistingTermConceptMapById.get();
-
-			ourLog.info("Deleting existing TermConceptMap {} and its children...", existingTermConceptMap.getId());
-			for (TermConceptMapGroup group : existingTermConceptMap.getConceptMapGroups()) {
-
-				for (TermConceptMapGroupElement element : group.getConceptMapGroupElements()) {
-
-					for (TermConceptMapGroupElementTarget target : element.getConceptMapGroupElementTargets()) {
-
-						myConceptMapGroupElementTargetDao.deleteTermConceptMapGroupElementTargetById(target.getId());
-					}
-
-					myConceptMapGroupElementDao.deleteTermConceptMapGroupElementById(element.getId());
-				}
-
-				myConceptMapGroupDao.deleteTermConceptMapGroupById(group.getId());
-			}
-
-			myConceptMapDao.deleteTermConceptMapById(existingTermConceptMap.getId());
-			ourLog.info("Done deleting existing TermConceptMap {} and its children.", existingTermConceptMap.getId());
-
-			ourLog.info("Flushing...");
-			myConceptMapGroupElementTargetDao.flush();
-			myConceptMapGroupElementDao.flush();
-			myConceptMapGroupDao.flush();
-			myConceptMapDao.flush();
-			ourLog.info("Done flushing.");
-		}
-
 		/*
 		 * Do the upload.
@@ -1019,6 +1153,7 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 			throw new InternalErrorException(fe);
 		}
 		myConceptMapDao.save(termConceptMap);
+		int codesSaved = 0;
 
 		if (theConceptMap.hasGroup()) {
 			TermConceptMapGroup termConceptMapGroup;
@@ -1054,7 +1189,12 @@ public abstract class BaseHapiTerminologySvcImpl implements IHapiTerminologySvc,
 						termConceptMapGroupElementTarget.setCode(target.getCode());
 						termConceptMapGroupElementTarget.setDisplay(target.getDisplay());
 						termConceptMapGroupElementTarget.setEquivalence(target.getEquivalence());
-						myConceptMapGroupElementTargetDao.saveAndFlush(termConceptMapGroupElementTarget);
+						myConceptMapGroupElementTargetDao.save(termConceptMapGroupElementTarget);
+
+						if (codesSaved++ % 250 == 0) {
+							ourLog.info("Have saved {} codes in conceptmap", codesSaved);
+							myConceptMapGroupElementTargetDao.flush();
+						}
 					}
 				}
 			}
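The target-saving loop above swaps saveAndFlush() per row for save() plus a periodic flush: every 250th save forces the pending inserts out and logs progress. Because the counter is post-incremented, the very first row also triggers a log line, an early sign of life on large ConceptMaps. The counter shape on its own:

class PeriodicFlushDemo {
	public static void main(String[] args) {
		int codesSaved = 0;
		for (int row = 0; row < 1000; row++) {
			// ... save the row here ...
			if (codesSaved++ % 250 == 0) {
				// fires at saved counts 0, 250, 500, 750 (prints 1, 251, 501, 751)
				System.out.println("Have saved " + codesSaved + " codes");
			}
		}
	}
}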
@@ -68,6 +68,8 @@ public interface IHapiTerminologySvc {
 	 */
 	IIdType storeNewCodeSystemVersion(org.hl7.fhir.r4.model.CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequestDetails, List<org.hl7.fhir.r4.model.ValueSet> theValueSets, List<org.hl7.fhir.r4.model.ConceptMap> theConceptMaps);
 
+	void deleteConceptMapAndChildren(ResourceTable theResourceTable);
+
 	void storeTermConceptMapAndChildren(ResourceTable theResourceTable, ConceptMap theConceptMap);
 
 	boolean supportsSystem(String theCodeSystem);
@@ -11,6 +11,7 @@ import ca.uhn.fhir.jpa.term.snomedct.SctHandlerRelationship;
 import ca.uhn.fhir.jpa.util.Counter;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
@@ -62,23 +63,27 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 	public static final String SCT_FILE_CONCEPT = "Terminology/sct2_Concept_Full_";
 	public static final String SCT_FILE_DESCRIPTION = "Terminology/sct2_Description_Full-en";
 	public static final String SCT_FILE_RELATIONSHIP = "Terminology/sct2_Relationship_Full";
-	public static final String LOINC_ANSWERLIST_FILE = "AnswerList_Beta_1.csv";
-	public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink_Beta_1.csv";
+	public static final String LOINC_ANSWERLIST_FILE = "AnswerList.csv";
+	public static final String LOINC_ANSWERLIST_LINK_FILE = "LoincAnswerListLink.csv";
 	public static final String LOINC_DOCUMENT_ONTOLOGY_FILE = "DocumentOntology.csv";
 	public static final String LOINC_UPLOAD_PROPERTIES_FILE = "loincupload.properties";
-	public static final String LOINC_FILE = "loinc.csv";
-	public static final String LOINC_HIERARCHY_FILE = "MULTI-AXIAL_HIERARCHY.CSV";
-	public static final String LOINC_PART_FILE = "Part_Beta_1.csv";
-	public static final String LOINC_PART_LINK_FILE = "LoincPartLink_Beta_1.csv";
-	public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping_Beta_1.csv";
+	public static final String LOINC_FILE = "Loinc.csv";
+	public static final String LOINC_HIERARCHY_FILE = "MultiAxialHierarchy.csv";
+	public static final String LOINC_PART_FILE = "Part.csv";
+	public static final String LOINC_PART_LINK_FILE = "LoincPartLink.csv";
+	public static final String LOINC_PART_RELATED_CODE_MAPPING_FILE = "PartRelatedCodeMapping.csv";
 	public static final String LOINC_RSNA_PLAYBOOK_FILE = "LoincRsnaRadiologyPlaybook.csv";
-	public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE = "Top2000CommonLabResultsUS.csv";
-	public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE = "Top2000CommonLabResultsSI.csv";
+	public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE = "Top2000CommonLabResultsUs.csv";
+	public static final String LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE = "Top2000CommonLabResultsSi.csv";
 	public static final String LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE = "LoincUniversalLabOrdersValueSet.csv";
 	public static final String LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV = "LoincIeeeMedicalDeviceCodeMappingTable.csv";
 	public static final String LOINC_IMAGING_DOCUMENT_CODES_FILE = "ImagingDocumentCodes.csv";
-	private static final int LOG_INCREMENT = 100000;
+	public static final String LOINC_GROUP_FILE = "Group.csv";
+	public static final String LOINC_GROUP_TERMS_FILE = "GroupLoincTerms.csv";
+	public static final String LOINC_PARENT_GROUP_FILE = "ParentGroup.csv";
+	private static final int LOG_INCREMENT = 1000;
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyLoaderSvcImpl.class);
 
 	@Autowired
 	private IHapiTerminologySvc myTermSvc;
 	@Autowired(required = false)
@@ -119,12 +124,20 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 
 	}
 
-	private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode) {
+	private void iterateOverZipFile(LoadedFileDescriptors theDescriptors, String theFileNamePart, IRecordHandler theHandler, char theDelimiter, QuoteMode theQuoteMode, boolean theIsPartialFilename) {
 
+		boolean foundMatch = false;
 		for (FileDescriptor nextZipBytes : theDescriptors.getUncompressedFileDescriptors()) {
 			String nextFilename = nextZipBytes.getFilename();
-			if (nextFilename.contains(theFileNamePart)) {
+			boolean matches;
+			if (theIsPartialFilename) {
+				matches = nextFilename.contains(theFileNamePart);
+			} else {
+				matches = nextFilename.endsWith("/" + theFileNamePart) || nextFilename.equals(theFileNamePart);
+			}
+			if (matches) {
 				ourLog.info("Processing file {}", nextFilename);
+				foundMatch = true;
 
 				Reader reader;
 				CSVParser parsed;
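The matching rule introduced above distinguishes exact and partial names: LOINC entries are now compared against the full file name (allowing any leading ZIP directory), while SNOMED CT keeps substring matching because its release files embed a date in the name. A standalone check of both modes; the entry names below are examples only:

class FilenameMatchDemo {
	static boolean matches(String entryName, String wanted, boolean partial) {
		if (partial) {
			return entryName.contains(wanted);
		}
		// Exact match: the whole name, or the last path segment of a ZIP entry
		return entryName.endsWith("/" + wanted) || entryName.equals(wanted);
	}

	public static void main(String[] args) {
		System.out.println(matches("LoincCsv/Loinc.csv", "Loinc.csv", false));         // true
		System.out.println(matches("LoincCsv/LoincPartLink.csv", "Loinc.csv", false)); // false
		System.out.println(matches("Terminology/sct2_Concept_Full_INT_20180131.txt",
			"Terminology/sct2_Concept_Full_", true));                                  // true
	}
}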
@@ -149,6 +162,9 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 				int nextLoggedCount = 0;
 				while (iter.hasNext()) {
 					CSVRecord nextRecord = iter.next();
+					if (nextRecord.isConsistent() == false) {
+						continue;
+					}
 					theHandler.accept(nextRecord);
 					count++;
 					if (count >= nextLoggedCount) {
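CSVRecord.isConsistent() is Apache Commons CSV's check that a record's field count matches the header, so the loop above now skips ragged rows instead of aborting the whole load. A small standalone demonstration with hypothetical data:

import java.io.StringReader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

class RaggedRowDemo {
	public static void main(String[] args) throws Exception {
		String csv = "code,display\n8867-4,Heart rate\nBADROW\n718-7,Hemoglobin";
		CSVParser parser = CSVFormat.DEFAULT.withFirstRecordAsHeader()
			.parse(new StringReader(csv));
		for (CSVRecord record : parser) {
			if (!record.isConsistent()) {
				continue; // field count does not match the header: skip it
			}
			System.out.println(record.get("code") + " -> " + record.get("display"));
		}
	}
}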
@@ -164,6 +180,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 
 		}
 
+		if (!foundMatch) {
+			throw new InvalidRequestException("Did not find file matching " + theFileNamePart);
+		}
+
 	}
 
 	@Override
@@ -171,10 +191,7 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 		LoadedFileDescriptors descriptors = new LoadedFileDescriptors(theFiles);
 		List<String> mandatoryFilenameFragments = Arrays.asList(
 			LOINC_FILE,
-			LOINC_HIERARCHY_FILE);
-		descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments);
-
-		List<String> optionalFilenameFragments = Arrays.asList(
+			LOINC_HIERARCHY_FILE,
 			LOINC_UPLOAD_PROPERTIES_FILE,
 			LOINC_ANSWERLIST_FILE,
 			LOINC_ANSWERLIST_LINK_FILE,
@@ -189,6 +206,10 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
 			LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV,
 			LOINC_IMAGING_DOCUMENT_CODES_FILE
 		);
+		descriptors.verifyMandatoryFilesExist(mandatoryFilenameFragments);
+
+		List<String> optionalFilenameFragments = Arrays.asList(
+		);
 		descriptors.verifyOptionalFilesExist(optionalFilenameFragments);
 
 		ourLog.info("Beginning LOINC processing");
@ -251,60 +272,75 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
|
||||||
|
|
||||||
// Part file
|
// Part file
|
||||||
handler = new LoincPartHandler(codeSystemVersion, code2concept);
|
handler = new LoincPartHandler(codeSystemVersion, code2concept);
|
||||||
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC);
|
iterateOverZipFile(theDescriptors, LOINC_PART_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||||
Map<PartTypeAndPartName, String> partTypeAndPartNameToPartNumber = ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber();
|
Map<PartTypeAndPartName, String> partTypeAndPartNameToPartNumber = ((LoincPartHandler) handler).getPartTypeAndPartNameToPartNumber();
|
||||||
|
|
||||||
// Loinc Codes
|
// Loinc Codes
|
||||||
handler = new LoincHandler(codeSystemVersion, code2concept, propertyNamesToTypes, partTypeAndPartNameToPartNumber);
|
handler = new LoincHandler(codeSystemVersion, code2concept, propertyNamesToTypes, partTypeAndPartNameToPartNumber);
|
||||||
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC);
|
iterateOverZipFile(theDescriptors, LOINC_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||||
|
|
||||||
// Loinc Hierarchy
|
// Loinc Hierarchy
|
||||||
handler = new LoincHierarchyHandler(codeSystemVersion, code2concept);
|
handler = new LoincHierarchyHandler(codeSystemVersion, code2concept);
|
||||||
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
|
iterateOverZipFile(theDescriptors, LOINC_HIERARCHY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||||
|
|
||||||
// Answer lists (ValueSets of potential answers/values for loinc "questions")
|
// Answer lists (ValueSets of potential answers/values for loinc "questions")
|
||||||
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties);
|
handler = new LoincAnswerListHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties);
|
||||||
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC);
|
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||||
|
|
||||||
// Answer list links (connects loinc observation codes to answerlist codes)
|
// Answer list links (connects loinc observation codes to answerlist codes)
|
||||||
handler = new LoincAnswerListLinkHandler(code2concept, valueSets);
|
handler = new LoincAnswerListLinkHandler(code2concept, valueSets);
|
||||||
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
|
iterateOverZipFile(theDescriptors, LOINC_ANSWERLIST_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||||
|
|
||||||
|
// RSNA Playbook file
|
||||||
|
// Note that this should come before the "Part Related Code Mapping"
|
||||||
|
// file because there are some duplicate mappings between these
|
||||||
|
// two files, and the RSNA Playbook file has more metadata
|
||||||
|
handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties);
|
||||||
|
iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);
|
||||||
|
|
||||||
// Part link file
|
// Part link file
|
||||||
handler = new LoincPartLinkHandler(codeSystemVersion, code2concept);
|
handler = new LoincPartLinkHandler(codeSystemVersion, code2concept);
|
||||||
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC);
|
iterateOverZipFile(theDescriptors, LOINC_PART_LINK_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Part related code mapping
- handler = new LoincPartRelatedCodeMappingHandler(codeSystemVersion, code2concept, valueSets, conceptMaps, uploadProperties);
+ handler = new LoincPartRelatedCodeMappingHandler(code2concept, valueSets, conceptMaps, uploadProperties);
- iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+ iterateOverZipFile(theDescriptors, LOINC_PART_RELATED_CODE_MAPPING_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Document Ontology File
handler = new LoincDocumentOntologyHandler(code2concept, propertyNamesToTypes, valueSets, conceptMaps, uploadProperties);
- iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+ iterateOverZipFile(theDescriptors, LOINC_DOCUMENT_ONTOLOGY_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

- // RSNA Playbook file
- handler = new LoincRsnaPlaybookHandler(code2concept, valueSets, conceptMaps, uploadProperties);
- iterateOverZipFile(theDescriptors, LOINC_RSNA_PLAYBOOK_FILE, handler, ',', QuoteMode.NON_NUMERIC);

// Top 2000 Codes - US
handler = new LoincTop2000LabResultsUsHandler(code2concept, valueSets, conceptMaps, uploadProperties);
- iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+ iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Top 2000 Codes - SI
handler = new LoincTop2000LabResultsSiHandler(code2concept, valueSets, conceptMaps, uploadProperties);
- iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+ iterateOverZipFile(theDescriptors, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// Universal Lab Order ValueSet
handler = new LoincUniversalOrderSetHandler(code2concept, valueSets, conceptMaps, uploadProperties);
- iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+ iterateOverZipFile(theDescriptors, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

// IEEE Medical Device Codes
handler = new LoincIeeeMedicalDeviceCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
- iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC);
+ iterateOverZipFile(theDescriptors, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV, handler, ',', QuoteMode.NON_NUMERIC, false);

// Imaging Document Codes
handler = new LoincImagingDocumentCodeHandler(code2concept, valueSets, conceptMaps, uploadProperties);
- iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC);
+ iterateOverZipFile(theDescriptors, LOINC_IMAGING_DOCUMENT_CODES_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

+ // Group File
+ handler = new LoincGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
+ iterateOverZipFile(theDescriptors, LOINC_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

+ // Group Terms File
+ handler = new LoincGroupTermsFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
+ iterateOverZipFile(theDescriptors, LOINC_GROUP_TERMS_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

+ // Parent Group File
+ handler = new LoincParentGroupFileHandler(code2concept, valueSets, conceptMaps, uploadProperties);
+ iterateOverZipFile(theDescriptors, LOINC_PARENT_GROUP_FILE, handler, ',', QuoteMode.NON_NUMERIC, false);

IOUtils.closeQuietly(theDescriptors);
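All of the LOINC uploads above funnel CSV rows through IRecordHandler implementations. A minimal, self-contained sketch of that flow under simplified assumptions (the nested IRecordHandler interface is a hypothetical stand-in for ca.uhn.fhir.jpa.term.IRecordHandler, and the CSV content is inlined rather than read from the distribution zip):

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.csv.QuoteMode;

import java.io.StringReader;

public class RecordHandlerSketch {

   // Hypothetical stand-in for ca.uhn.fhir.jpa.term.IRecordHandler
   interface IRecordHandler {
      void accept(CSVRecord theRecord);
   }

   public static void main(String[] args) throws Exception {
      String csv = "\"GroupId\",\"Group\"\n\"LG1\",\"Cholesterol\"\n";
      // Mirrors the ',' delimiter and QuoteMode.NON_NUMERIC arguments used above
      CSVFormat format = CSVFormat.DEFAULT
         .withDelimiter(',')
         .withQuoteMode(QuoteMode.NON_NUMERIC)
         .withFirstRecordAsHeader();
      IRecordHandler handler = record ->
         System.out.println(record.get("GroupId") + " -> " + record.get("Group"));
      try (CSVParser parser = new CSVParser(new StringReader(csv), format)) {
         parser.forEach(handler::accept); // one callback per data row
      }
   }
}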
@@ -332,18 +368,18 @@ public class TerminologyLoaderSvcImpl implements IHapiTerminologyLoaderSvc {
final Set<String> validConceptIds = new HashSet<>();

IRecordHandler handler = new SctHandlerConcept(validConceptIds);
- iterateOverZipFile(theDescriptors, SCT_FILE_CONCEPT, handler, '\t', null);
+ iterateOverZipFile(theDescriptors, SCT_FILE_CONCEPT, handler, '\t', null, true);

ourLog.info("Have {} valid concept IDs", validConceptIds.size());

handler = new SctHandlerDescription(validConceptIds, code2concept, id2concept, codeSystemVersion);
- iterateOverZipFile(theDescriptors, SCT_FILE_DESCRIPTION, handler, '\t', null);
+ iterateOverZipFile(theDescriptors, SCT_FILE_DESCRIPTION, handler, '\t', null, true);

ourLog.info("Got {} concepts, cloning map", code2concept.size());
final HashMap<String, TermConcept> rootConcepts = new HashMap<>(code2concept);

handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
- iterateOverZipFile(theDescriptors, SCT_FILE_RELATIONSHIP, handler, '\t', null);
+ iterateOverZipFile(theDescriptors, SCT_FILE_RELATIONSHIP, handler, '\t', null, true);

IOUtils.closeQuietly(theDescriptors);
@@ -26,6 +26,8 @@ import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ContactPoint;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ValueSet;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.List;

@@ -35,8 +37,9 @@ import java.util.Properties;
import static org.apache.commons.lang3.StringUtils.*;

public abstract class BaseLoincHandler implements IRecordHandler {
+ private static final Logger ourLog = LoggerFactory.getLogger(BaseLoincHandler.class);
public static final String LOINC_COPYRIGHT_STATEMENT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/";

/**
* This is <b>NOT</b> the LOINC CodeSystem URI! It is just
* the website URL to LOINC.

@@ -52,8 +55,10 @@ public abstract class BaseLoincHandler implements IRecordHandler {

BaseLoincHandler(Map<String, TermConcept> theCode2Concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
myValueSets = theValueSets;
+ myValueSets.forEach(t -> myIdToValueSet.put(t.getId(), t));
myCode2Concept = theCode2Concept;
myConceptMaps = theConceptMaps;
+ myConceptMaps.forEach(t -> myIdToConceptMaps.put(t.getId(), t));
myUploadProperties = theUploadProperties;
}

@@ -80,10 +85,9 @@ public abstract class BaseLoincHandler implements IRecordHandler {

String displayName = theDisplayName;
if (isBlank(displayName)) {
- for (TermConcept next : myCode2Concept.values()) {
- if (next.getCode().equals(theCode)) {
- displayName = next.getDisplay();
- }
+ TermConcept concept = myCode2Concept.get(theCode);
+ if (concept != null) {
+ displayName = concept.getDisplay();
}
}
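The constructor change above pre-indexes the ValueSet and ConceptMap lists by id, and the display-name lookup above it swaps a linear scan for a map get. A small sketch of the indexing idiom (requires hapi-fhir-structures-r4 on the classpath; the list contents are illustrative):

import org.hl7.fhir.r4.model.ValueSet;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class IdIndexSketch {
   public static void main(String[] args) {
      List<ValueSet> valueSets = new ArrayList<>();
      ValueSet vs = new ValueSet();
      vs.setId("loinc-imaging-document-codes");
      valueSets.add(vs);

      // Index once up front (the same forEach/put idiom is applied to the
      // ConceptMap list); later lookups become O(1) map gets instead of
      // scans over the whole list.
      Map<String, ValueSet> idToValueSet = new HashMap<>();
      valueSets.forEach(t -> idToValueSet.put(t.getId(), t));

      System.out.println(idToValueSet.containsKey("loinc-imaging-document-codes")); // true
   }
}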
@@ -176,6 +180,8 @@ public abstract class BaseLoincHandler implements IRecordHandler {
.setCode(theMapping.getTargetCode())
.setDisplay(theMapping.getTargetDisplay())
.setEquivalence(theMapping.getEquivalence());
+ } else {
+ ourLog.info("Not going to add a mapping from [{}/{}] to [{}/{}] because one already exists", theMapping.getSourceCodeSystem(), theMapping.getSourceCode(), theMapping.getTargetCodeSystem(), theMapping.getTargetCode());
}
}

@@ -192,7 +198,6 @@ public abstract class BaseLoincHandler implements IRecordHandler {
vs.setUrl(theValueSetUri);
vs.setId(theValueSetId);
vs.setVersion(version);
- vs.setName(theValueSetName);
vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
vs.setPublisher(REGENSTRIEF_INSTITUTE_INC);
vs.addContact()

@@ -206,6 +211,11 @@ public abstract class BaseLoincHandler implements IRecordHandler {
} else {
vs = myIdToValueSet.get(theValueSetId);
}

+ if (isBlank(vs.getName()) && isNotBlank(theValueSetName)) {
+ vs.setName(theValueSetName);
+ }
+
return vs;
}
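The getValueSet() change moves name assignment out of the creation branch, so a later caller can backfill a name onto a ValueSet that an earlier caller created without one, while never overwriting a name that is already set. Roughly:

import org.hl7.fhir.r4.model.ValueSet;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

public class NameBackfillSketch {

   // Mirrors the merged behaviour inside getValueSet()
   static void applyName(ValueSet theVs, String theValueSetName) {
      if (isBlank(theVs.getName()) && isNotBlank(theValueSetName)) {
         theVs.setName(theValueSetName);
      }
   }

   public static void main(String[] args) {
      ValueSet vs = new ValueSet();
      applyName(vs, null);                 // created without a name
      applyName(vs, "LOINC Group LG1");    // backfilled on a later pass
      applyName(vs, "Some Other Name");    // ignored: name already set
      System.out.println(vs.getName());    // LOINC Group LG1
   }
}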
@@ -0,0 +1,62 @@
+ package ca.uhn.fhir.jpa.term.loinc;
+
+ /*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+ import ca.uhn.fhir.jpa.entity.TermConcept;
+ import ca.uhn.fhir.jpa.term.IRecordHandler;
+ import org.apache.commons.csv.CSVRecord;
+ import org.hl7.fhir.r4.model.ConceptMap;
+ import org.hl7.fhir.r4.model.ValueSet;
+
+ import java.util.List;
+ import java.util.Map;
+ import java.util.Properties;
+
+ import static org.apache.commons.lang3.StringUtils.trim;
+
+ public class LoincGroupFileHandler extends BaseLoincHandler implements IRecordHandler {
+
+ public static final String VS_URI_PREFIX = "http://loinc.org/vs/";
+
+ public LoincGroupFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
+ super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
+ }
+
+ @Override
+ public void accept(CSVRecord theRecord) {
+ //"ParentGroupId","GroupId","Group","Archetype","Status","VersionFirstReleased"
+ String parentGroupId = trim(theRecord.get("ParentGroupId"));
+ String groupId = trim(theRecord.get("GroupId"));
+ String groupName = trim(theRecord.get("Group"));
+
+ ValueSet parentValueSet = getValueSet(parentGroupId, VS_URI_PREFIX + parentGroupId, null, null);
+ parentValueSet
+ .getCompose()
+ .getIncludeFirstRep()
+ .addValueSet(VS_URI_PREFIX + groupId);
+
+ // Create group to set its name (terms are added in a different
+ // handler)
+ getValueSet(groupId, VS_URI_PREFIX + groupId, groupName, null);
+ }
+
+ }
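The new LoincGroupFileHandler builds parent-group ValueSets that include child groups by URL rather than by enumerating codes. A compilable sketch of that nesting (the URIs are illustrative; requires hapi-fhir-structures-r4):

import org.hl7.fhir.r4.model.ValueSet;

public class GroupNestingSketch {
   static final String VS_URI_PREFIX = "http://loinc.org/vs/";

   public static void main(String[] args) {
      // A parent-group ValueSet references each child group ValueSet by
      // canonical URL; the child groups get their codes from a separate
      // handler (LoincGroupTermsFileHandler).
      ValueSet parent = new ValueSet().setUrl(VS_URI_PREFIX + "PG1");
      parent.getCompose().getIncludeFirstRep().addValueSet(VS_URI_PREFIX + "LG1");
      parent.getCompose().getIncludeFirstRep().addValueSet(VS_URI_PREFIX + "LG2");

      System.out.println(parent.getCompose().getIncludeFirstRep().getValueSet().size()); // 2
   }
}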
@@ -0,0 +1,53 @@
+ package ca.uhn.fhir.jpa.term.loinc;
+
+ /*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+ import ca.uhn.fhir.jpa.entity.TermConcept;
+ import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
+ import ca.uhn.fhir.jpa.term.IRecordHandler;
+ import org.apache.commons.csv.CSVRecord;
+ import org.hl7.fhir.r4.model.ConceptMap;
+ import org.hl7.fhir.r4.model.ValueSet;
+
+ import java.util.List;
+ import java.util.Map;
+ import java.util.Properties;
+
+ import static org.apache.commons.lang3.StringUtils.trim;
+
+ public class LoincGroupTermsFileHandler extends BaseLoincHandler implements IRecordHandler {
+
+ public LoincGroupTermsFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
+ super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
+ }
+
+ @Override
+ public void accept(CSVRecord theRecord) {
+ //"Category","GroupId","Archetype","LoincNumber","LongCommonName"
+ String groupId = trim(theRecord.get("GroupId"));
+ String loincNumber = trim(theRecord.get("LoincNumber"));
+
+ ValueSet valueSet = getValueSet(groupId, LoincGroupFileHandler.VS_URI_PREFIX + groupId, null, null);
+ addCodeAsIncludeToValueSet(valueSet, IHapiTerminologyLoaderSvc.LOINC_URI, loincNumber, null);
+ }
+
+ }
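LoincGroupTermsFileHandler delegates to the base class's addCodeAsIncludeToValueSet(), which is not shown in this diff. A rough, assumed equivalent for orientation only: find or create the include block for the code system, then append the code.

import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent;

public class IncludeCodeSketch {
   static final String LOINC_URI = "http://loinc.org";

   // Assumed shape of BaseLoincHandler#addCodeAsIncludeToValueSet; the
   // real method is not part of this diff.
   static void addCode(ValueSet theVs, String theSystem, String theCode) {
      ConceptSetComponent include = theVs.getCompose().getInclude().stream()
         .filter(t -> theSystem.equals(t.getSystem()))
         .findFirst()
         .orElseGet(() -> theVs.getCompose().addInclude().setSystem(theSystem));
      include.addConcept().setCode(theCode);
   }

   public static void main(String[] args) {
      ValueSet group = new ValueSet().setUrl("http://loinc.org/vs/LG1");
      addCode(group, LOINC_URI, "2093-3");
      addCode(group, LOINC_URI, "2089-1");
      System.out.println(group.getCompose().getIncludeFirstRep().getConcept().size()); // 2
   }
}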
@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.term.loinc;
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

@@ -39,6 +39,7 @@ import static org.apache.commons.lang3.StringUtils.trim;

public class LoincHandler implements IRecordHandler {

+ private static final Logger ourLog = LoggerFactory.getLogger(LoincHandler.class);
private final Map<String, TermConcept> myCode2Concept;
private final TermCodeSystemVersion myCodeSystemVersion;
private final Map<String, CodeSystem.PropertyType> myPropertyNames;

@@ -86,7 +87,17 @@ public class LoincHandler implements IRecordHandler {
concept.addPropertyString(nextPropertyName, nextPropertyValue);
break;
case CODING:
- PartTypeAndPartName key = new PartTypeAndPartName(nextPropertyName, nextPropertyValue);
+ // FIXME: handle "Ser/Plas^Donor"
+ String propertyValue = nextPropertyValue;
+ if (nextPropertyName.equals("COMPONENT")) {
+ if (propertyValue.contains("^")) {
+ propertyValue = propertyValue.substring(0, propertyValue.indexOf("^"));
+ } else if (propertyValue.contains("/")) {
+ propertyValue = propertyValue.substring(0, propertyValue.indexOf("/"));
+ }
+ }
+
+ PartTypeAndPartName key = new PartTypeAndPartName(nextPropertyName, propertyValue);
String partNumber = myPartTypeAndPartNameToPartNumber.get(key);

if (partNumber == null && nextPropertyName.equals("TIME_ASPCT")) {

@@ -106,11 +117,12 @@ public class LoincHandler implements IRecordHandler {
continue;
}

- // Validate.notBlank(partNumber, "Unknown part: " + key);
if (isNotBlank(partNumber)) {
concept.addPropertyCoding(nextPropertyName, IHapiTerminologyLoaderSvc.LOINC_URI, partNumber, nextPropertyValue);
} else {
- ourLog.warn("Unable to find part code with TYPE[{}] and NAME[{}]", key.getPartType(), key.getPartName());
+ String msg = "Unable to find part code with TYPE[" + key.getPartType() + "] and NAME[" + nextPropertyValue + "] (using name " + propertyValue + ")";
+ ourLog.warn(msg);
+ // throw new InternalErrorException(msg);
}
break;
case DECIMAL:

@@ -129,5 +141,4 @@ public class LoincHandler implements IRecordHandler {
myCode2Concept.put(code, concept);
}
}
- private static final Logger ourLog = LoggerFactory.getLogger(LoincHandler.class);
}
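The new COMPONENT handling trims a part name to its first segment before the part-number lookup; note the FIXME above — a value like "Ser/Plas^Donor" only loses the "^" suffix, because just one branch runs. Isolated:

public class ComponentTrimSketch {

   // Mirrors the merged COMPONENT handling: cut at the first "^", or
   // failing that at the first "/", before looking up the part number.
   static String trimComponent(String thePropertyValue) {
      if (thePropertyValue.contains("^")) {
         return thePropertyValue.substring(0, thePropertyValue.indexOf("^"));
      } else if (thePropertyValue.contains("/")) {
         return thePropertyValue.substring(0, thePropertyValue.indexOf("/"));
      }
      return thePropertyValue;
   }

   public static void main(String[] args) {
      System.out.println(trimComponent("Cholesterol^Donor")); // Cholesterol
      // The FIXME case: only the "^" branch runs, so the "/" survives.
      System.out.println(trimComponent("Ser/Plas^Donor"));    // Ser/Plas
   }
}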
@@ -37,7 +37,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincIeeeMedicalDeviceCodeHandler extends BaseLoincHandler implements IRecordHandler {

public static final String LOINC_IEEE_CM_ID = "LOINC-IEEE-MEDICAL-DEVICE-CM";
- public static final String LOINC_IEEE_CM_URI = "http://loinc.org/fhir/loinc-ieee-device-code-mappings";
+ public static final String LOINC_IEEE_CM_URI = "http://loinc.org/cm/loinc-to-ieee-device-codes";
public static final String LOINC_IEEE_CM_NAME = "LOINC/IEEE Device Code Mappings";
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC/IEEE Medical Device Code Mapping Table contains content from IEEE (http://ieee.org), copyright © 2017 IEEE.";

@@ -36,7 +36,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincImagingDocumentCodeHandler extends BaseLoincHandler implements IRecordHandler {

public static final String VS_ID = "loinc-imaging-document-codes";
- public static final String VS_URI = "http://loinc.org/fhir/loinc-imaging-document-codes";
+ public static final String VS_URI = "http://loinc.org/vs/loinc-imaging-document-codes";
public static final String VS_NAME = "LOINC Imaging Document Codes";

public LoincImagingDocumentCodeHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
@@ -0,0 +1,51 @@
+ package ca.uhn.fhir.jpa.term.loinc;
+
+ /*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+ import ca.uhn.fhir.jpa.entity.TermConcept;
+ import ca.uhn.fhir.jpa.term.IRecordHandler;
+ import org.apache.commons.csv.CSVRecord;
+ import org.hl7.fhir.r4.model.ConceptMap;
+ import org.hl7.fhir.r4.model.ValueSet;
+
+ import java.util.List;
+ import java.util.Map;
+ import java.util.Properties;
+
+ import static org.apache.commons.lang3.StringUtils.trim;
+
+ public class LoincParentGroupFileHandler extends BaseLoincHandler implements IRecordHandler {
+
+ public LoincParentGroupFileHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
+ super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
+ }
+
+ @Override
+ public void accept(CSVRecord theRecord) {
+ // "ParentGroupId","ParentGroup","Status"
+ String parentGroupId = trim(theRecord.get("ParentGroupId"));
+ String parentGroupName = trim(theRecord.get("ParentGroup"));
+
+ getValueSet(parentGroupId, LoincGroupFileHandler.VS_URI_PREFIX + parentGroupId, parentGroupName, null);
+ }
+
+ }
@@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.term.loinc;
* #L%
*/

- import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
import ca.uhn.fhir.jpa.term.IRecordHandler;

@@ -41,23 +40,23 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler implements IRecordHandler {

public static final String LOINC_SCT_PART_MAP_ID = "loinc-parts-to-snomed-ct";
public static final String LOINC_SCT_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-snomed-ct";
- public static final String LOINC_SCT_PART_MAP_NAME = "LOINC Part Map to SNOMED CT";
- public static final String LOINC_RXNORM_PART_MAP_ID = "loinc-parts-to-rxnorm";
- public static final String LOINC_RXNORM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-rxnorm";
- public static final String LOINC_RXNORM_PART_MAP_NAME = "LOINC Part Map to RxNORM";
- public static final String LOINC_RADLEX_PART_MAP_ID = "loinc-parts-to-radlex";
- public static final String LOINC_RADLEX_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-radlex";
- public static final String LOINC_RADLEX_PART_MAP_NAME = "LOINC Part Map to RADLEX";
+ public static final String LOINC_TERM_TO_RPID_PART_MAP_ID = "loinc-term-to-rpids";
+ public static final String LOINC_TERM_TO_RPID_PART_MAP_URI = "http://loinc.org/cm/loinc-to-rpids";
+ public static final String LOINC_TERM_TO_RPID_PART_MAP_NAME = "LOINC Terms to RadLex RPIDs";
+ public static final String LOINC_PART_TO_RID_PART_MAP_ID = "loinc-part-to-rids";
+ public static final String LOINC_PART_TO_RID_PART_MAP_URI = "http://loinc.org/cm/loinc-to-rids";
+ public static final String LOINC_PART_TO_RID_PART_MAP_NAME = "LOINC Parts to RadLex RIDs";
+ private static final String LOINC_SCT_PART_MAP_NAME = "LOINC Part Map to SNOMED CT";
+ private static final String LOINC_RXNORM_PART_MAP_ID = "loinc-parts-to-rxnorm";
+ private static final String LOINC_RXNORM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-rxnorm";
+ private static final String LOINC_RXNORM_PART_MAP_NAME = "LOINC Part Map to RxNORM";
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC Part File, LOINC/SNOMED CT Expression Association and Map Sets File, RELMA database and associated search index files include SNOMED Clinical Terms (SNOMED CT®) which is used by permission of the International Health Terminology Standards Development Organisation (IHTSDO) under license. All rights are reserved. SNOMED CT® was originally created by The College of American Pathologists. “SNOMED” and “SNOMED CT” are registered trademarks of the IHTSDO. Use of SNOMED CT content is subject to the terms and conditions set forth in the SNOMED CT Affiliate License Agreement. It is the responsibility of those implementing this product to ensure they are appropriately licensed and for more information on the license, including how to register as an Affiliate Licensee, please refer to http://www.snomed.org/snomed-ct/get-snomed-ct or info@snomed.org. Under the terms of the Affiliate License, use of SNOMED CT in countries that are not IHTSDO Members is subject to reporting and fee payment obligations. However, IHTSDO agrees to waive the requirements to report and pay fees for use of SNOMED CT content included in the LOINC Part Mapping and LOINC Term Associations for purposes that support or enable more effective use of LOINC. This material includes content from the US Edition to SNOMED CT, which is developed and maintained by the U.S. National Library of Medicine and is available to authorized UMLS Metathesaurus Licensees from the UTS Downloads site at https://uts.nlm.nih.gov.";
- private final Map<String, TermConcept> myCode2Concept;
- private final TermCodeSystemVersion myCodeSystemVersion;
- private final List<ConceptMap> myConceptMaps;
+ private static final String LOINC_PUBCHEM_PART_MAP_URI = "http://loinc.org/cm/loinc-parts-to-pubchem";
+ private static final String LOINC_PUBCHEM_PART_MAP_ID = "loinc-parts-to-pubchem";
+ private static final String LOINC_PUBCHEM_PART_MAP_NAME = "LOINC Part Map to PubChem";

- public LoincPartRelatedCodeMappingHandler(TermCodeSystemVersion theCodeSystemVersion, Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
+ public LoincPartRelatedCodeMappingHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
super(theCode2concept, theValueSets, theConceptMaps, theUploadProperties);
- myCodeSystemVersion = theCodeSystemVersion;
- myCode2Concept = theCode2concept;
- myConceptMaps = theConceptMaps;
}

@Override

@@ -108,12 +107,20 @@ public class LoincPartRelatedCodeMappingHandler extends BaseLoincHandler implements IRecordHandler {
loincPartMapName = LOINC_RXNORM_PART_MAP_NAME;
break;
case "http://www.radlex.org":
- loincPartMapId = LOINC_RADLEX_PART_MAP_ID;
- loincPartMapUri = LOINC_RADLEX_PART_MAP_URI;
- loincPartMapName = LOINC_RADLEX_PART_MAP_NAME;
+ loincPartMapId = LOINC_PART_TO_RID_PART_MAP_ID;
+ loincPartMapUri = LOINC_PART_TO_RID_PART_MAP_URI;
+ loincPartMapName = LOINC_PART_TO_RID_PART_MAP_NAME;
+ break;
+ case "http://pubchem.ncbi.nlm.nih.gov":
+ loincPartMapId = LOINC_PUBCHEM_PART_MAP_ID;
+ loincPartMapUri = LOINC_PUBCHEM_PART_MAP_URI;
+ loincPartMapName = LOINC_PUBCHEM_PART_MAP_NAME;
break;
default:
- throw new InternalErrorException("Don't know how to handle mapping to system: " + extCodeSystem);
+ loincPartMapId = extCodeSystem.replaceAll("[^a-zA-Z]", "");
+ loincPartMapUri = extCodeSystem;
+ loincPartMapName = "Unknown Mapping";
+ break;
}

addConceptMapEntry(
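The rewritten default branch above no longer throws for an unrecognized target system; it derives a ConceptMap id by stripping non-letters from the system URI. For example (the URI is illustrative):

public class FallbackPartMapSketch {
   public static void main(String[] args) {
      // Unknown target systems now get a synthesized ConceptMap identity
      // instead of an InternalErrorException.
      String extCodeSystem = "https://www.ebi.ac.uk/chebi";
      String loincPartMapId = extCodeSystem.replaceAll("[^a-zA-Z]", "");
      String loincPartMapUri = extCodeSystem;
      String loincPartMapName = "Unknown Mapping";
      System.out.println(loincPartMapId);                        // httpswwwebiacukchebi
      System.out.println(loincPartMapUri + " / " + loincPartMapName);
   }
}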
@@ -39,21 +39,16 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecordHandler {
public static final String RSNA_CODES_VS_ID = "loinc-rsna-radiology-playbook";
public static final String RSNA_CODES_VS_URI = "http://loinc.org/vs/loinc-rsna-radiology-playbook";
public static final String RSNA_CODES_VS_NAME = "LOINC/RSNA Radiology Playbook";
- public static final String RID_MAPPING_CM_ID = "LOINC-TO-RID-CODES-CM";
- public static final String RID_MAPPING_CM_URI = "http://loinc.org/rid-codes";
- public static final String RID_MAPPING_CM_NAME = "RSNA Playbook RID Codes Mapping";
public static final String RID_CS_URI = "http://www.radlex.org";
- public static final String RPID_MAPPING_CM_ID = "LOINC-TO-RPID-CODES-CM";
- public static final String RPID_MAPPING_CM_URI = "http://loinc.org/rpid-codes";
- public static final String RPID_MAPPING_CM_NAME = "RSNA Playbook RPID Codes Mapping";
/*
- * About these being the same - Per Dan:
+ * About these being the same - Per Dan Vreeman:
* We had some discussion about this, and both
* RIDs (RadLex clinical terms) and RPIDs (Radlex Playbook Ids)
* belong to the same "code system" since they will never collide.
* The codesystem uri is "http://www.radlex.org". FYI, that's
* now listed on the FHIR page:
* https://www.hl7.org/fhir/terminologies-systems.html
+ * -ja
*/
public static final String RPID_CS_URI = RID_CS_URI;
private static final String CM_COPYRIGHT = "This content from LOINC® is copyright © 1995 Regenstrief Institute, Inc. and the LOINC Committee, and available at no cost under the license at https://loinc.org/license/. The LOINC/RSNA Radiology Playbook and the LOINC Part File contain content from RadLex® (http://rsna.org/RadLex.aspx), copyright © 2005-2017, The Radiological Society of North America, Inc., available at no cost under the license at http://www.rsna.org/uploadedFiles/RSNA/Content/Informatics/RadLex_License_Agreement_and_Terms_of_Use_V2_Final.pdf.";

@@ -179,9 +174,9 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecordHandler {
if (isNotBlank(rid)) {
addConceptMapEntry(
new ConceptMapping()
- .setConceptMapId(RID_MAPPING_CM_ID)
- .setConceptMapUri(RID_MAPPING_CM_URI)
- .setConceptMapName(RID_MAPPING_CM_NAME)
+ .setConceptMapId(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_ID)
+ .setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_URI)
+ .setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_NAME)
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
.setSourceCode(partNumber)
.setSourceDisplay(partName)

@@ -196,9 +191,9 @@ public class LoincRsnaPlaybookHandler extends BaseLoincHandler implements IRecordHandler {
if (isNotBlank(rpid)) {
addConceptMapEntry(
new ConceptMapping()
- .setConceptMapId(RPID_MAPPING_CM_ID)
- .setConceptMapUri(RPID_MAPPING_CM_URI)
- .setConceptMapName(RPID_MAPPING_CM_NAME)
+ .setConceptMapId(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_ID)
+ .setConceptMapUri(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_URI)
+ .setConceptMapName(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_NAME)
.setSourceCodeSystem(IHapiTerminologyLoaderSvc.LOINC_URI)
.setSourceCode(loincNumber)
.setSourceDisplay(longCommonName)
@@ -34,7 +34,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
public class LoincUniversalOrderSetHandler extends BaseLoincHandler implements IRecordHandler {

public static final String VS_ID = "loinc-universal-order-set-vs";
- public static final String VS_URI = "http://loinc.org/fhir/loinc-universal-order-set";
+ public static final String VS_URI = "http://loinc.org/vs/loinc-universal-order-set";
public static final String VS_NAME = "LOINC Universal Order Set";

public LoincUniversalOrderSetHandler(Map<String, TermConcept> theCode2concept, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps, Properties theUploadProperties) {
@@ -83,14 +83,19 @@ public class ReindexController implements IReindexController {
break;
}
}
+ } catch (Exception e) {
+ ourLog.error("Failure during reindex", e);
+ count = -1;
} finally {
myReindexingLock.release();
}

synchronized (this) {
if (count == null) {
+ ourLog.info("Reindex pass complete, no remaining resource to index");
myDontReindexUntil = System.currentTimeMillis() + DateUtils.MILLIS_PER_HOUR;
} else {
+ ourLog.info("Reindex pass complete, {} remaining resource to index", count);
myDontReindexUntil = null;
}
}
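The ReindexController change wraps the pass in a catch that logs and records a sentinel count of -1, while the finally block still releases the semaphore. Condensed into a runnable sketch (doReindexPass() is a hypothetical stand-in for the real pass):

import java.util.concurrent.Semaphore;

public class ReindexPassSketch {
   private final Semaphore myReindexingLock = new Semaphore(1);

   Integer runPass() {
      Integer count;
      try {
         count = doReindexPass(); // hypothetical worker
      } catch (Exception e) {
         // Failures no longer escape the scheduled pass
         System.err.println("Failure during reindex: " + e);
         count = -1;
      } finally {
         // Released on every path, success or failure
         myReindexingLock.release();
      }
      return count;
   }

   private Integer doReindexPass() {
      throw new IllegalStateException("boom");
   }

   public static void main(String[] args) throws InterruptedException {
      ReindexPassSketch sketch = new ReindexPassSketch();
      sketch.myReindexingLock.acquire();
      System.out.println(sketch.runPass()); // -1
   }
}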
@@ -24,7 +24,7 @@ import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

import org.hl7.fhir.r4.hapi.ctx.DefaultProfileValidationSupport;
- import org.hl7.fhir.r4.hapi.ctx.ValidationSupportChain;
+ import org.hl7.fhir.r4.hapi.validation.ValidationSupportChain;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@@ -107,7 +107,7 @@ public class TestR4Config extends BaseJavaConfigR4 {

DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
- // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
+ .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
.countQuery(new ThreadQueryCountHolder())
.build();
@@ -331,7 +331,6 @@ public abstract class BaseJpaTest {
theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true));
theDaoConfig.setExpungeEnabled(expungeEnabled);

- theSearchParamPresenceSvc.flushCachesForUnitTest();
theSearchParamRegistry.forceRefresh();
}
@@ -24,17 +24,33 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {

private IIdType myConceptMapId;

- @AfterClass
- public static void afterClassClearContext() {
- TestUtil.clearAllStaticFieldsForUnitTest();
- }
-
@Before
@Transactional
public void before02() {
myConceptMapId = myConceptMapDao.create(createConceptMap(), mySrd).getId().toUnqualifiedVersionless();
}

+ @Test
+ public void testDeleteConceptMap() {
+ myConceptMapDao.delete(myConceptMapId);
+
+ new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
+ @Override
+ protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
+ TranslationRequest translationRequest = new TranslationRequest();
+ translationRequest.getCodeableConcept().addCoding()
+ .setSystem(CS_URL)
+ .setCode("12345");
+ translationRequest.setTargetSystem(new UriType(CS_URL_3));
+
+ TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
+
+ assertFalse(translationResult.getResult().booleanValue());
+ }
+ });
+ }
+
@Test
public void testTranslateByCodeSystemsAndSourceCodeOneToMany() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);

@@ -81,4 +97,9 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
}
});
}

+ @AfterClass
+ public static void afterClassClearContext() {
+ TestUtil.clearAllStaticFieldsForUnitTest();
+ }
}
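The new testDeleteConceptMap runs its assertions inside a TransactionTemplate so the translate() call executes within an active transaction. The wrapper pattern, extracted into a compilable helper (theWork stands in for the assertion body; requires spring-tx):

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;

public class TxTemplateSketch {

   // Runs theWork inside a transaction boundary, exactly as the added
   // tests wrap their translate() assertions.
   static void runInTx(PlatformTransactionManager theTxManager, Runnable theWork) {
      new TransactionTemplate(theTxManager).execute(new TransactionCallbackWithoutResult() {
         @Override
         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
            theWork.run();
         }
      });
   }
}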
@@ -64,7 +64,6 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu3Test {
}
}

-
@Test
public void testCreateInvalidParamNoPath() {
SearchParameter fooSp = new SearchParameter();

@@ -858,6 +857,49 @@ public class FhirResourceDaoDstu3SearchCustomSearchParamTest extends BaseJpaDstu3Test {

}

+ @Test
+ public void testSearchParameterDescendsIntoContainedResource() {
+ SearchParameter sp = new SearchParameter();
+ sp.addBase("Observation");
+ sp.setCode("specimencollectedtime");
+ sp.setType(Enumerations.SearchParamType.DATE);
+ sp.setTitle("Observation Specimen Collected Time");
+ sp.setExpression("Observation.specimen.resolve().receivedTime");
+ sp.setXpathUsage(SearchParameter.XPathUsageType.NORMAL);
+ sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
+ ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(sp));
+ mySearchParameterDao.create(sp);
+
+ mySearchParamRegsitry.forceRefresh();
+
+ Specimen specimen = new Specimen();
+ specimen.setId("#FOO");
+ specimen.setReceivedTimeElement(new DateTimeType("2011-01-01"));
+ Observation o = new Observation();
+ o.setId("O1");
+ o.getContained().add(specimen);
+ o.setStatus(Observation.ObservationStatus.FINAL);
+ o.setSpecimen(new Reference("#FOO"));
+ myObservationDao.update(o);
+
+ specimen = new Specimen();
+ specimen.setId("#FOO");
+ specimen.setReceivedTimeElement(new DateTimeType("2011-01-03"));
+ o = new Observation();
+ o.setId("O2");
+ o.getContained().add(specimen);
+ o.setStatus(Observation.ObservationStatus.FINAL);
+ o.setSpecimen(new Reference("#FOO"));
+ myObservationDao.update(o);
+
+ SearchParameterMap params = new SearchParameterMap();
+ params.add("specimencollectedtime", new DateParam("2011-01-01"));
+ IBundleProvider outcome = myObservationDao.search(params);
+ List<String> ids = toUnqualifiedVersionlessIdValues(outcome);
+ ourLog.info("IDS: " + ids);
+ assertThat(ids, contains("Observation/O1"));
+ }
+
@Test
public void testSearchWithCustomParam() {
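The added test exercises a search parameter whose FHIRPath expression uses resolve() to descend into a contained resource. A minimal sketch of how the contained Specimen is wired to its parent Observation via the local "#FOO" id (DSTU3 structures; the printout is illustrative):

import org.hl7.fhir.dstu3.model.DateTimeType;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.Reference;
import org.hl7.fhir.dstu3.model.Specimen;

public class ContainedResourceSketch {
   public static void main(String[] args) {
      // The contained resource carries a local id that the parent's
      // specimen reference points at; resolve() in the search parameter
      // expression follows exactly this link.
      Specimen specimen = new Specimen();
      specimen.setId("#FOO");
      specimen.setReceivedTimeElement(new DateTimeType("2011-01-01"));

      Observation o = new Observation();
      o.getContained().add(specimen);
      o.setSpecimen(new Reference("#FOO"));

      System.out.println(o.getContained().size()); // 1
   }
}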
@@ -7,6 +7,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;

+ import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport;
import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;

@@ -81,7 +82,8 @@ public class SearchParamExtractorDstu3Test {
}
};

- SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(ourCtx, ourValidationSupport, searchParamRegistry);
+ SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(new DaoConfig(), ourCtx, ourValidationSupport, searchParamRegistry);
+ extractor.start();
Set<BaseResourceIndexedSearchParam> tokens = extractor.extractSearchParamTokens(new ResourceTable(), obs);
assertEquals(1, tokens.size());
ResourceIndexedSearchParamToken token = (ResourceIndexedSearchParamToken) tokens.iterator().next();
@@ -66,8 +66,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Autowired
protected IResourceLinkDao myResourceLinkDao;
@Autowired
- protected ISearchParamDao mySearchParamDao;
- @Autowired
protected ISearchParamPresentDao mySearchParamPresentDao;
@Autowired
protected IResourceIndexedSearchParamStringDao myResourceIndexedSearchParamStringDao;

@@ -170,6 +168,12 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Qualifier("myPatientDaoR4")
protected IFhirResourceDaoPatient<Patient> myPatientDao;
@Autowired
+ protected IResourceTableDao myResourceTableDao;
+ @Autowired
+ protected IResourceHistoryTableDao myResourceHistoryTableDao;
+ @Autowired
+ protected IForcedIdDao myForcedIdDao;
+ @Autowired
@Qualifier("myCoverageDaoR4")
protected IFhirResourceDao<Coverage> myCoverageDao;
@Autowired

@@ -188,10 +192,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
@Qualifier("myResourceProvidersR4")
protected Object myResourceProviders;
@Autowired
- protected IResourceTableDao myResourceTableDao;
- @Autowired
- protected IResourceHistoryTableDao myResourceHistoryTableDao;
- @Autowired
protected IResourceTagDao myResourceTagDao;
@Autowired
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;

@@ -257,6 +257,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
myDaoConfig.setExpireSearchResultsAfterMillis(new DaoConfig().getExpireSearchResultsAfterMillis());
myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
myDaoConfig.setSuppressUpdatesWithNoChange(new DaoConfig().isSuppressUpdatesWithNoChange());
+ myDaoConfig.setAllowContainsSearches(new DaoConfig().isAllowContainsSearches());
}

@After

@@ -283,7 +284,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {

@Before
public void beforeFlushFT() {
- runInTransaction(()->{
+ runInTransaction(() -> {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.purgeAll(ResourceIndexedSearchParamString.class);

@@ -314,6 +315,11 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
return myFhirCtx;
}

+ @Override
+ protected PlatformTransactionManager getTxManager() {
+ return myTxManager;
+ }
+
protected <T extends IBaseResource> T loadResourceFromClasspath(Class<T> type, String resourceName) throws IOException {
InputStream stream = FhirResourceDaoDstu2SearchNoFtTest.class.getResourceAsStream(resourceName);
if (stream == null) {

@@ -324,11 +330,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
return newJsonParser.parseResource(type, string);
}

- @Override
- protected PlatformTransactionManager getTxManager() {
- return myTxManager;
- }
-
@AfterClass
public static void afterClassClearContextBaseJpaR4Test() throws Exception {
ourValueSetDao.purgeCaches();
@@ -26,17 +26,33 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {

private IIdType myConceptMapId;

- @AfterClass
- public static void afterClassClearContext() {
- TestUtil.clearAllStaticFieldsForUnitTest();
- }
-
@Before
@Transactional
public void before02() {
myConceptMapId = myConceptMapDao.create(createConceptMap(), mySrd).getId().toUnqualifiedVersionless();
}

+ @Test
+ public void testDeleteConceptMap() {
+ myConceptMapDao.delete(myConceptMapId);
+
+ new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
+ @Override
+ protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
+ TranslationRequest translationRequest = new TranslationRequest();
+ translationRequest.getCodeableConcept().addCoding()
+ .setSystem(CS_URL)
+ .setCode("12345");
+ translationRequest.setTargetSystem(new UriType(CS_URL_3));
+
+ TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
+
+ assertFalse(translationResult.getResult().booleanValue());
+ }
+ });
+ }
+
@Test
public void testTranslateByCodeSystemsAndSourceCodeOneToMany() {
ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
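The delete test above asserts that translation reports no result once the ConceptMap is gone. For orientation, the inputs it builds are just a source coding and a target-system URI; a stand-alone sketch using R4 structures (the URIs are stand-ins for the CS_URL/CS_URL_3 test constants):

import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.UriType;

public class TranslateInputSketch {
   public static void main(String[] args) {
      // Source coding + target system are the only inputs supplied to
      // translate(); once the ConceptMap is deleted, the result for this
      // pair is reported as false.
      CodeableConcept source = new CodeableConcept();
      source.addCoding()
         .setSystem("http://example.com/source-cs") // stands in for CS_URL
         .setCode("12345");
      UriType targetSystem = new UriType("http://example.com/target-cs3"); // stands in for CS_URL_3

      System.out.println(source.getCodingFirstRep().getSystem() + "|"
         + source.getCodingFirstRep().getCode() + " -> " + targetSystem.getValue());
   }
}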
@@ -205,6 +221,98 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
       });
    }
 
+   @Test
+   public void testTranslateUsingPredicatesWithSourceAndTargetSystem2() {
+      ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
+
+      ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
+
+      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
+         @Override
+         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
+            /*
+             * Provided:
+             *   source code
+             *   source code system
+             *   target code system #2
+             */
+            TranslationRequest translationRequest = new TranslationRequest();
+            translationRequest.getCodeableConcept().addCoding()
+               .setSystem(CS_URL)
+               .setCode("12345");
+            translationRequest.setTargetSystem(new UriType(CS_URL_2));
+
+            TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
+
+            assertTrue(translationResult.getResult().booleanValue());
+            assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
+
+            assertEquals(1, translationResult.getMatches().size());
+
+            TranslationMatch translationMatch = translationResult.getMatches().get(0);
+            assertEquals(ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
+            Coding concept = translationMatch.getConcept();
+            assertEquals("34567", concept.getCode());
+            assertEquals("Target Code 34567", concept.getDisplay());
+            assertEquals(CS_URL_2, concept.getSystem());
+            assertEquals("Version 2", concept.getVersion());
+            assertFalse(concept.getUserSelected());
+            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
+         }
+      });
+   }
+
+   @Test
+   public void testTranslateUsingPredicatesWithSourceAndTargetSystem3() {
+      ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
+
+      ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
+
+      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
+         @Override
+         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
+            /*
+             * Provided:
+             *   source code
+             *   source code system
+             *   target code system #3
+             */
+            TranslationRequest translationRequest = new TranslationRequest();
+            translationRequest.getCodeableConcept().addCoding()
+               .setSystem(CS_URL)
+               .setCode("12345");
+            translationRequest.setTargetSystem(new UriType(CS_URL_3));
+
+            TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
+
+            assertTrue(translationResult.getResult().booleanValue());
+            assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
+
+            assertEquals(2, translationResult.getMatches().size());
+
+            TranslationMatch translationMatch = translationResult.getMatches().get(0);
+            assertEquals(ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
+            Coding concept = translationMatch.getConcept();
+            assertEquals("56789", concept.getCode());
+            assertEquals("Target Code 56789", concept.getDisplay());
+            assertEquals(CS_URL_3, concept.getSystem());
+            assertEquals("Version 4", concept.getVersion());
+            assertFalse(concept.getUserSelected());
+            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
+
+            translationMatch = translationResult.getMatches().get(1);
+            assertEquals(ConceptMapEquivalence.WIDER.toCode(), translationMatch.getEquivalence().getCode());
+            concept = translationMatch.getConcept();
+            assertEquals("67890", concept.getCode());
+            assertEquals("Target Code 67890", concept.getDisplay());
+            assertEquals(CS_URL_3, concept.getSystem());
+            assertEquals("Version 4", concept.getVersion());
+            assertFalse(concept.getUserSelected());
+            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
+         }
+      });
+   }
+
    @Test
    public void testTranslateUsingPredicatesWithSourceSystem() {
       ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@@ -356,98 +464,6 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
       });
    }
 
-   @Test
-   public void testTranslateUsingPredicatesWithSourceAndTargetSystem2() {
-      ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
-
-      ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
-
-      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
-         @Override
-         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
-            /*
-             * Provided:
-             *   source code
-             *   source code system
-             *   target code system #2
-             */
-            TranslationRequest translationRequest = new TranslationRequest();
-            translationRequest.getCodeableConcept().addCoding()
-               .setSystem(CS_URL)
-               .setCode("12345");
-            translationRequest.setTargetSystem(new UriType(CS_URL_2));
-
-            TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
-
-            assertTrue(translationResult.getResult().booleanValue());
-            assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
-
-            assertEquals(1, translationResult.getMatches().size());
-
-            TranslationMatch translationMatch = translationResult.getMatches().get(0);
-            assertEquals(ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
-            Coding concept = translationMatch.getConcept();
-            assertEquals("34567", concept.getCode());
-            assertEquals("Target Code 34567", concept.getDisplay());
-            assertEquals(CS_URL_2, concept.getSystem());
-            assertEquals("Version 2", concept.getVersion());
-            assertFalse(concept.getUserSelected());
-            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
-         }
-      });
-   }
-
-   @Test
-   public void testTranslateUsingPredicatesWithSourceAndTargetSystem3() {
-      ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
-
-      ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
-
-      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
-         @Override
-         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
-            /*
-             * Provided:
-             *   source code
-             *   source code system
-             *   target code system #3
-             */
-            TranslationRequest translationRequest = new TranslationRequest();
-            translationRequest.getCodeableConcept().addCoding()
-               .setSystem(CS_URL)
-               .setCode("12345");
-            translationRequest.setTargetSystem(new UriType(CS_URL_3));
-
-            TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
-
-            assertTrue(translationResult.getResult().booleanValue());
-            assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
-
-            assertEquals(2, translationResult.getMatches().size());
-
-            TranslationMatch translationMatch = translationResult.getMatches().get(0);
-            assertEquals(ConceptMapEquivalence.EQUAL.toCode(), translationMatch.getEquivalence().getCode());
-            Coding concept = translationMatch.getConcept();
-            assertEquals("56789", concept.getCode());
-            assertEquals("Target Code 56789", concept.getDisplay());
-            assertEquals(CS_URL_3, concept.getSystem());
-            assertEquals("Version 4", concept.getVersion());
-            assertFalse(concept.getUserSelected());
-            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
-
-            translationMatch = translationResult.getMatches().get(1);
-            assertEquals(ConceptMapEquivalence.WIDER.toCode(), translationMatch.getEquivalence().getCode());
-            concept = translationMatch.getConcept();
-            assertEquals("67890", concept.getCode());
-            assertEquals("Target Code 67890", concept.getDisplay());
-            assertEquals(CS_URL_3, concept.getSystem());
-            assertEquals("Version 4", concept.getVersion());
-            assertFalse(concept.getUserSelected());
-            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
-         }
-      });
-   }
-
    @Test
    public void testTranslateUsingPredicatesWithSourceValueSet() {
       ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@@ -686,6 +702,92 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
       });
    }
 
+   @Test
+   public void testTranslateWithReverseUsingPredicatesWithSourceAndTargetSystem1() {
+      ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
+
+      ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
+
+      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
+         @Override
+         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
+            /*
+             * Provided:
+             *   source code
+             *   source code system
+             *   target code system #1
+             *   reverse = true
+             */
+            TranslationRequest translationRequest = new TranslationRequest();
+            translationRequest.getCodeableConcept().addCoding()
+               .setSystem(CS_URL_2)
+               .setCode("34567");
+            translationRequest.setTargetSystem(new UriType(CS_URL));
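+            // reverse = true runs the translation target-to-source: the coding above is treated as a
+            // target concept and the matching source concept is expected back (comment added for clarity).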
+            translationRequest.setReverse(true);
+
+            TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
+
+            assertTrue(translationResult.getResult().booleanValue());
+            assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
+
+            assertEquals(1, translationResult.getMatches().size());
+
+            TranslationMatch translationMatch = translationResult.getMatches().get(0);
+            assertNull(translationMatch.getEquivalence());
+            Coding concept = translationMatch.getConcept();
+            assertEquals("12345", concept.getCode());
+            assertEquals("Source Code 12345", concept.getDisplay());
+            assertEquals(CS_URL, concept.getSystem());
+            assertEquals("Version 1", concept.getVersion());
+            assertFalse(concept.getUserSelected());
+            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
+         }
+      });
+   }
+
+   @Test
+   public void testTranslateWithReverseUsingPredicatesWithSourceAndTargetSystem4() {
+      ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
+
+      ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
+
+      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
+         @Override
+         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
+            /*
+             * Provided:
+             *   source code
+             *   source code system
+             *   target code system #4
+             *   reverse = true
+             */
+            TranslationRequest translationRequest = new TranslationRequest();
+            translationRequest.getCodeableConcept().addCoding()
+               .setSystem(CS_URL_2)
+               .setCode("34567");
+            translationRequest.setTargetSystem(new UriType(CS_URL_4));
+            translationRequest.setReverse(true);
+
+            TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
+
+            assertTrue(translationResult.getResult().booleanValue());
+            assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
+
+            assertEquals(1, translationResult.getMatches().size());
+
+            TranslationMatch translationMatch = translationResult.getMatches().get(0);
+            assertNull(translationMatch.getEquivalence());
+            Coding concept = translationMatch.getConcept();
+            assertEquals("78901", concept.getCode());
+            assertEquals("Source Code 78901", concept.getDisplay());
+            assertEquals(CS_URL_4, concept.getSystem());
+            assertEquals("Version 5", concept.getVersion());
+            assertFalse(concept.getUserSelected());
+            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
+         }
+      });
+   }
+
    @Test
    public void testTranslateWithReverseUsingPredicatesWithSourceSystem() {
       ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@@ -790,92 +892,6 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {
       });
    }
 
-   @Test
-   public void testTranslateWithReverseUsingPredicatesWithSourceAndTargetSystem1() {
-      ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
-
-      ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
-
-      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
-         @Override
-         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
-            /*
-             * Provided:
-             *   source code
-             *   source code system
-             *   target code system #1
-             *   reverse = true
-             */
-            TranslationRequest translationRequest = new TranslationRequest();
-            translationRequest.getCodeableConcept().addCoding()
-               .setSystem(CS_URL_2)
-               .setCode("34567");
-            translationRequest.setTargetSystem(new UriType(CS_URL));
-            translationRequest.setReverse(true);
-
-            TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
-
-            assertTrue(translationResult.getResult().booleanValue());
-            assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
-
-            assertEquals(1, translationResult.getMatches().size());
-
-            TranslationMatch translationMatch = translationResult.getMatches().get(0);
-            assertNull(translationMatch.getEquivalence());
-            Coding concept = translationMatch.getConcept();
-            assertEquals("12345", concept.getCode());
-            assertEquals("Source Code 12345", concept.getDisplay());
-            assertEquals(CS_URL, concept.getSystem());
-            assertEquals("Version 1", concept.getVersion());
-            assertFalse(concept.getUserSelected());
-            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
-         }
-      });
-   }
-
-   @Test
-   public void testTranslateWithReverseUsingPredicatesWithSourceAndTargetSystem4() {
-      ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
-
-      ourLog.info("ConceptMap:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
-
-      new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
-         @Override
-         protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
-            /*
-             * Provided:
-             *   source code
-             *   source code system
-             *   target code system #4
-             *   reverse = true
-             */
-            TranslationRequest translationRequest = new TranslationRequest();
-            translationRequest.getCodeableConcept().addCoding()
-               .setSystem(CS_URL_2)
-               .setCode("34567");
-            translationRequest.setTargetSystem(new UriType(CS_URL_4));
-            translationRequest.setReverse(true);
-
-            TranslationResult translationResult = myConceptMapDao.translate(translationRequest, null);
-
-            assertTrue(translationResult.getResult().booleanValue());
-            assertEquals("Matches found!", translationResult.getMessage().getValueAsString());
-
-            assertEquals(1, translationResult.getMatches().size());
-
-            TranslationMatch translationMatch = translationResult.getMatches().get(0);
-            assertNull(translationMatch.getEquivalence());
-            Coding concept = translationMatch.getConcept();
-            assertEquals("78901", concept.getCode());
-            assertEquals("Source Code 78901", concept.getDisplay());
-            assertEquals(CS_URL_4, concept.getSystem());
-            assertEquals("Version 5", concept.getVersion());
-            assertFalse(concept.getUserSelected());
-            assertEquals(CM_URL, translationMatch.getSource().getValueAsString());
-         }
-      });
-   }
-
    @Test
    public void testTranslateWithReverseUsingPredicatesWithSourceValueSet() {
       ConceptMap conceptMap = myConceptMapDao.read(myConceptMapId);
@@ -977,4 +993,9 @@
          }
       });
    }
+
+   @AfterClass
+   public static void afterClassClearContext() {
+      TestUtil.clearAllStaticFieldsForUnitTest();
+   }
 }
@@ -128,7 +128,7 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
    }
 
    @Test
-   public void testCreateSearchParameterOnSearchParameterDoesntCauseEndlessReindexLoop() throws InterruptedException {
+   public void testCreateSearchParameterOnSearchParameterDoesntCauseEndlessReindexLoop() {
       SearchParameter fooSp = new SearchParameter();
       fooSp.setCode("foo");
       fooSp.addBase("SearchParameter");
@@ -355,7 +355,6 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
       assertThat(results, contains(mrId));
    }
 
-
    /**
    * See #863
    */
@@ -709,6 +708,12 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
          @Override
          protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
            mySearchParameterDao.create(siblingSp, mySrd);
+         }
+      });
+
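+      // Run the registry refresh in its own transaction, after the SearchParameter create above has
+      // committed (descriptive comment added for clarity).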
+      txTemplate.execute(new TransactionCallbackWithoutResult() {
+         @Override
+         protected void doInTransactionWithoutResult(TransactionStatus theArg0) {
            mySearchParamRegsitry.forceRefresh();
         }
      });
@@ -1032,6 +1037,48 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
 
    }
 
+   @Test
+   public void testSearchParameterDescendsIntoContainedResource() {
+      SearchParameter sp = new SearchParameter();
+      sp.addBase("Observation");
+      sp.setCode("specimencollectedtime");
+      sp.setType(Enumerations.SearchParamType.DATE);
+      sp.setTitle("Observation Specimen Collected Time");
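+      // resolve() in the path below descends into the contained Specimen referenced by
+      // Observation.specimen (descriptive comment added for clarity).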
+      sp.setExpression("Observation.specimen.resolve().receivedTime");
+      sp.setXpathUsage(org.hl7.fhir.r4.model.SearchParameter.XPathUsageType.NORMAL);
+      sp.setStatus(org.hl7.fhir.r4.model.Enumerations.PublicationStatus.ACTIVE);
+      mySearchParameterDao.create(sp);
+
+      mySearchParamRegsitry.forceRefresh();
+
+      Specimen specimen = new Specimen();
+      specimen.setId("#FOO");
+      specimen.setReceivedTimeElement(new DateTimeType("2011-01-01"));
+      Observation o = new Observation();
+      o.setId("O1");
+      o.getContained().add(specimen);
+      o.setStatus(Observation.ObservationStatus.FINAL);
+      o.setSpecimen(new Reference("#FOO"));
+      myObservationDao.update(o);
+
+      specimen = new Specimen();
+      specimen.setId("#FOO");
+      specimen.setReceivedTimeElement(new DateTimeType("2011-01-03"));
+      o = new Observation();
+      o.setId("O2");
+      o.getContained().add(specimen);
+      o.setStatus(Observation.ObservationStatus.FINAL);
+      o.setSpecimen(new Reference("#FOO"));
+      myObservationDao.update(o);
+
+      SearchParameterMap params = new SearchParameterMap();
+      params.add("specimencollectedtime", new DateParam("2011-01-01"));
+      IBundleProvider outcome = myObservationDao.search(params);
+      List<String> ids = toUnqualifiedVersionlessIdValues(outcome);
+      ourLog.info("IDS: " + ids);
+      assertThat(ids, contains("Observation/O1"));
+   }
+
    @Test
    public void testSearchWithCustomParam() {
 
@@ -46,7 +46,6 @@ public class FhirResourceDaoR4SearchMissingTest extends BaseJpaR4Test {
       org.setActive(true);
       myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
 
-      assertThat(mySearchParamDao.findAll(), empty());
       assertThat(mySearchParamPresentDao.findAll(), empty());
       assertThat(myResourceIndexedSearchParamStringDao.findAll(), empty());
       assertThat(myResourceIndexedSearchParamDateDao.findAll(), empty());
@@ -485,11 +485,18 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
         Class<ResourceIndexedSearchParamNumber> type = ResourceIndexedSearchParamNumber.class;
         List<ResourceIndexedSearchParamNumber> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
         ourLog.info(toStringMultiline(results));
-        assertThat(results, containsInAnyOrder(
-           ((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_SEQUENCE, null).setResource(resource).setMissing(true))),
-           ((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("1.00")).setResource(resource))),
-           ((ResourceIndexedSearchParamNumber) (new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("2.00")).setResource(resource)))
-        ));
+        ResourceIndexedSearchParamNumber expect0 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("2.00"));
+        expect0.setResource(resource);
+        expect0.calculateHashes();
+        ResourceIndexedSearchParamNumber expect1 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_SEQUENCE, null);
+        expect1.setResource(resource).setMissing(true);
+        expect1.calculateHashes();
+        ResourceIndexedSearchParamNumber expect2 = new ResourceIndexedSearchParamNumber(ImmunizationRecommendation.SP_DOSE_NUMBER, new BigDecimal("1.00"));
+        expect2.setResource(resource);
+        expect2.calculateHashes();
+
+        assertThat(results, containsInAnyOrder(expect0, expect1, expect2));
      }
   });
 }
@@ -504,10 +511,12 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
 
    IIdType id = mySubstanceDao.create(res, mySrd).getId().toUnqualifiedVersionless();
 
-   Class<ResourceIndexedSearchParamQuantity> type = ResourceIndexedSearchParamQuantity.class;
-   List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
-   ourLog.info(toStringMultiline(results));
-   assertEquals(2, results.size());
+   runInTransaction(()->{
+      Class<ResourceIndexedSearchParamQuantity> type = ResourceIndexedSearchParamQuantity.class;
+      List<?> results = myEntityManager.createQuery("SELECT i FROM " + type.getSimpleName() + " i", type).getResultList();
+      ourLog.info(toStringMultiline(results));
+      assertEquals(2, results.size());
+   });
 
    List<IIdType> actual = toUnqualifiedVersionlessIds(
      mySubstanceDao.search(new SearchParameterMap().setLoadSynchronous(true).add(Substance.SP_QUANTITY, new QuantityParam(null, 123, "http://foo", "UNIT"))));
@@ -2261,6 +2270,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
 
    @Test
    public void testSearchWithContains() {
+      myDaoConfig.setAllowContainsSearches(true);
 
       Patient pt1 = new Patient();
       pt1.addName().setFamily("ABCDEFGHIJK");
@@ -1,10 +1,7 @@
 package ca.uhn.fhir.jpa.dao.r4;
 
 import ca.uhn.fhir.jpa.dao.*;
-import ca.uhn.fhir.jpa.entity.ResourceEncodingEnum;
-import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
-import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
-import ca.uhn.fhir.jpa.entity.TagTypeEnum;
+import ca.uhn.fhir.jpa.entity.*;
 import ca.uhn.fhir.model.api.Include;
 import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
 import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
@@ -152,6 +149,42 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
      return retVal;
   }
 
+
+   @Test
+   public void testDeletedResourcesAreReindexed() {
+      myDaoConfig.setSchedulingDisabled(true);
+
+      Patient pt1 = new Patient();
+      pt1.setActive(true);
+      pt1.addName().setFamily("FAM");
+      IIdType id1 = myPatientDao.create(pt1).getId().toUnqualifiedVersionless();
+
+      runInTransaction(()->{
+         assertThat(myResourceIndexedSearchParamTokenDao.countForResourceId(id1.getIdPartAsLong()), greaterThan(0));
+      });
+
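+      // Null out the index status and mark the row deleted, so the reindexing pass below has to
+      // cope with a deleted, unindexed resource (descriptive comment added for clarity).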
+      runInTransaction(()->{
+         Optional<ResourceTable> tableOpt = myResourceTableDao.findById(id1.getIdPartAsLong());
+         assertTrue(tableOpt.isPresent());
+         ResourceTable table = tableOpt.get();
+         table.setIndexStatus(null);
+         table.setDeleted(new Date());
+      });
+
+      mySystemDao.performReindexingPass(1000);
+      mySystemDao.performReindexingPass(1000);
+
+      runInTransaction(()->{
+         Optional<ResourceTable> tableOpt = myResourceTableDao.findById(id1.getIdPartAsLong());
+         assertTrue(tableOpt.isPresent());
+         assertEquals(BaseHapiFhirDao.INDEX_STATUS_INDEXED, tableOpt.get().getIndexStatus().longValue());
+         assertThat(myResourceIndexedSearchParamTokenDao.countForResourceId(id1.getIdPartAsLong()), not(greaterThan(0)));
+      });
+
+   }
+
+
    @Test
    public void testCantSearchForDeletedResourceByLanguageOrTag() {
       String methodName = "testCantSearchForDeletedResourceByLanguageOrTag";
@@ -1,9 +1,11 @@
 package ca.uhn.fhir.jpa.dao.r4;
 
+import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.SearchBuilder;
 import ca.uhn.fhir.jpa.dao.SearchParameterMap;
 import ca.uhn.fhir.jpa.entity.ResourceIndexedCompositeStringUnique;
+import ca.uhn.fhir.jpa.entity.ResourceTable;
 import ca.uhn.fhir.jpa.search.JpaRuntimeSearchParam;
 import ca.uhn.fhir.jpa.util.JpaConstants;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
@@ -27,6 +29,7 @@ import org.springframework.transaction.support.TransactionTemplate;
 import javax.annotation.Nonnull;
 import java.util.Collections;
 import java.util.List;
+import java.util.Optional;
 import java.util.UUID;
 
 import static org.hamcrest.Matchers.*;
@@ -679,6 +679,8 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
      ourLog.info("Now have {} inserts", QueryCountHolder.getGrandTotal().getInsert());
      QueryCountHolder.clear();
 
+     ourLog.info("** About to update");
+
      pt.setId(id);
      pt.getNameFirstRep().addGiven("GIVEN1C");
      myPatientDao.update(pt);
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.dao.r4;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
 import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
 import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
@@ -80,7 +81,7 @@ public class SearchParamExtractorR4Test {
      }
   };
 
-  SearchParamExtractorR4 extractor = new SearchParamExtractorR4(ourCtx, ourValidationSupport, searchParamRegistry);
+  SearchParamExtractorR4 extractor = new SearchParamExtractorR4(new DaoConfig(), ourCtx, ourValidationSupport, searchParamRegistry);
   Set<BaseResourceIndexedSearchParam> tokens = extractor.extractSearchParamTokens(new ResourceTable(), obs);
   assertEquals(1, tokens.size());
   ResourceIndexedSearchParamToken token = (ResourceIndexedSearchParamToken) tokens.iterator().next();
@@ -19,19 +19,8 @@ public class ResourceIndexedSearchParamQuantityTest {
      ResourceIndexedSearchParamQuantity token = createParam("NAME", "123.001", "value", "VALUE");
 
      // Make sure our hashing function gives consistent results
-     assertEquals(945335027461836896L, token.getHashUnitsAndValPrefix().longValue());
-     assertEquals(5549105497508660145L, token.getHashValPrefix().longValue());
-  }
-
-  @Test
-  public void testValueTrimming() {
-     assertEquals(7265149425397186226L, createParam("NAME", "401.001", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
-     assertEquals(7265149425397186226L, createParam("NAME", "401.99999", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
-     assertEquals(7265149425397186226L, createParam("NAME", "401", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
-     // Should be different
-     assertEquals(-8387917096585386046L, createParam("NAME", "400.9999999", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
-     // Should be different
-     assertEquals(8819656626732693650L, createParam("NAME", "402.000000", "value", "VALUE").getHashUnitsAndValPrefix().longValue());
+     assertEquals(834432764963581074L, token.getHashIdentity().longValue());
+     assertEquals(-1970227166134682431L, token.getHashIdentityAndUnits().longValue());
   }
@@ -1,14 +1,16 @@
 package ca.uhn.fhir.jpa.entity;
 
+import ca.uhn.fhir.jpa.dao.DaoConfig;
 import org.junit.Test;
 
 import static org.junit.Assert.*;
 
+@SuppressWarnings("SpellCheckingInspection")
 public class ResourceIndexedSearchParamStringTest {
 
    @Test
    public void testHashFunctions() {
-      ResourceIndexedSearchParamString token = new ResourceIndexedSearchParamString("NAME", "value", "VALUE");
+      ResourceIndexedSearchParamString token = new ResourceIndexedSearchParamString(new DaoConfig(), "NAME", "value", "VALUE");
       token.setResource(new ResourceTable().setResourceType("Patient"));
 
       // Make sure our hashing function gives consistent results
@@ -18,7 +20,7 @@ public class ResourceIndexedSearchParamStringTest {
 
    @Test
    public void testHashFunctionsPrefixOnly() {
-      ResourceIndexedSearchParamString token = new ResourceIndexedSearchParamString("NAME", "vZZZZZZZZZZZZZZZZ", "VZZZZZZzzzZzzzZ");
+      ResourceIndexedSearchParamString token = new ResourceIndexedSearchParamString(new DaoConfig(), "NAME", "vZZZZZZZZZZZZZZZZ", "VZZZZZZzzzZzzzZ");
       token.setResource(new ResourceTable().setResourceType("Patient"));
 
       // Should be the same as in testHashFunctions()
@@ -3159,16 +3159,13 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
      testSearchWithEmptyParameter("/Observation?code=bar&value-concept=");
   }
 
-  private void testSearchWithEmptyParameter(String url) throws IOException {
-     HttpGet get = new HttpGet(ourServerBase + url);
-     CloseableHttpResponse resp = ourHttpClient.execute(get);
-     try {
+  private void testSearchWithEmptyParameter(String theUrl) throws IOException {
+     HttpGet get = new HttpGet(ourServerBase + theUrl);
+     try (CloseableHttpResponse resp = ourHttpClient.execute(get)) {
        assertEquals(200, resp.getStatusLine().getStatusCode());
        String respString = IOUtils.toString(resp.getEntity().getContent(), Constants.CHARSET_UTF8);
        Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, respString);
        assertEquals(1, bundle.getEntry().size());
-     } finally {
-        IOUtils.closeQuietly(resp.getEntity().getContent());
      }
   }
@@ -1,10 +1,15 @@
 package ca.uhn.fhir.jpa.provider.dstu3;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.fail;
+import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
+import ca.uhn.fhir.jpa.term.TerminologyLoaderSvcImpl;
+import ca.uhn.fhir.jpa.term.ZipCollectionBuilder;
+import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
+import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import ca.uhn.fhir.util.TestUtil;
+import org.apache.commons.io.IOUtils;
+import org.hl7.fhir.dstu3.model.*;
+import org.junit.AfterClass;
+import org.junit.Test;
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -15,34 +20,45 @@ import java.util.List;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
 
-import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
-import org.apache.commons.io.IOUtils;
-import org.hl7.fhir.dstu3.model.Attachment;
-import org.hl7.fhir.dstu3.model.IntegerType;
-import org.hl7.fhir.dstu3.model.Parameters;
-import org.hl7.fhir.dstu3.model.StringType;
-import org.hl7.fhir.dstu3.model.UriType;
-import org.junit.AfterClass;
-import org.junit.Test;
-
-import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
-import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.util.TestUtil;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.junit.Assert.*;
 
 public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDstu3Test {
 
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProviderDstu3Test.class);
 
-   private byte[] createLoincZip() throws IOException {
+   private static void addFile(ZipOutputStream theZos, String theFileName) throws IOException {
+      theZos.putNextEntry(new ZipEntry(theFileName));
+      theZos.write(IOUtils.toByteArray(TerminologyUploaderProviderDstu3Test.class.getResourceAsStream("/loinc/" + theFileName)));
+   }
+
+   public static byte[] createLoincZip() throws IOException {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
     ZipOutputStream zos = new ZipOutputStream(bos);
 
-    zos.putNextEntry(new ZipEntry("loinc.csv"));
-    zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/loinc.csv")));
-    zos.putNextEntry(new ZipEntry("LOINC_2.54_MULTI-AXIAL_HIERARCHY.CSV"));
-    zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/LOINC_2.54_MULTI-AXIAL_HIERARCHY.CSV")));
+    addFile(zos, "loincupload.properties");
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_PART_RELATED_CODE_MAPPING_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_DOCUMENT_ONTOLOGY_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_RSNA_PLAYBOOK_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_IMAGING_DOCUMENT_CODES_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE);
+    addFile(zos, TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE);
 
    zos.close();
 
    byte[] packageBytes = bos.toByteArray();
    return packageBytes;
 }
@@ -51,7 +67,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(bos);
 
-   List<String> inputNames = Arrays.asList("sct2_Concept_Full_INT_20160131.txt","sct2_Concept_Full-en_INT_20160131.txt","sct2_Description_Full-en_INT_20160131.txt","sct2_Identifier_Full_INT_20160131.txt","sct2_Relationship_Full_INT_20160131.txt","sct2_StatedRelationship_Full_INT_20160131.txt","sct2_TextDefinition_Full-en_INT_20160131.txt");
+   List<String> inputNames = Arrays.asList("sct2_Concept_Full_INT_20160131.txt", "sct2_Concept_Full-en_INT_20160131.txt", "sct2_Description_Full-en_INT_20160131.txt", "sct2_Identifier_Full_INT_20160131.txt", "sct2_Relationship_Full_INT_20160131.txt", "sct2_StatedRelationship_Full_INT_20160131.txt", "sct2_TextDefinition_Full-en_INT_20160131.txt");
    for (String nextName : inputNames) {
      zos.putNextEntry(new ZipEntry("SnomedCT_Release_INT_20160131_Full/Terminology/" + nextName));
      byte[] b = IOUtils.toByteArray(getClass().getResourceAsStream("/sct/" + nextName));
@@ -95,7 +111,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
    String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
    ourLog.info(resp);
 
-   assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
+   assertThat(((IntegerType) respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
 
    /*
    * Try uploading a second time
@@ -150,7 +166,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
   }
 
   @Test
-  public void testUploadPackageMissingUrl() throws Exception {
+  public void testUploadPackageMissingUrl() {
     try {
       ourClient
         .operation()
@@ -179,7 +195,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
    String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
    ourLog.info(resp);
 
-   assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
+   assertThat(((IntegerType) respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
 }
 
 @Test
@@ -205,7 +221,7 @@ public class TerminologyUploaderProviderDstu3Test extends BaseResourceProviderDs
    String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(respParam);
    ourLog.info(resp);
 
-   assertThat(((IntegerType)respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
+   assertThat(((IntegerType) respParam.getParameter().get(0).getValue()).getValue(), greaterThan(1));
 }
 
 @AfterClass
@@ -40,64 +40,6 @@ public class AuthorizationInterceptorResourceProviderR4Test extends BaseResource
      unregisterInterceptors();
   }
 
-  /**
-   * See #778
-   */
-  @Test
-  public void testReadingObservationAccessRight() {
-     Practitioner practitioner1 = new Practitioner();
-     final IIdType practitionerId1 = myClient.create().resource(practitioner1).execute().getId().toUnqualifiedVersionless();
-
-     Practitioner practitioner2 = new Practitioner();
-     final IIdType practitionerId2 = myClient.create().resource(practitioner2).execute().getId().toUnqualifiedVersionless();
-
-     Patient patient = new Patient();
-     patient.setActive(true);
-     final IIdType patientId = myClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
-
-     ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
-        @Override
-        public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
-           // allow write all Observation resource
-           // allow read only Observation resource in which it has a practitioner1 or practitioner2 compartment
-           return new RuleBuilder().allow()
-              .write()
-              .resourcesOfType(Observation.class)
-              .withAnyId()
-              .andThen()
-              .allow()
-              .read()
-              .resourcesOfType(Observation.class)
-              .inCompartment("Practitioner", Arrays.asList(practitionerId1, practitionerId2))
-              .andThen()
-              .denyAll()
-              .build();
-        }
-     });
-
-     Observation obs1 = new Observation();
-     obs1.setStatus(ObservationStatus.FINAL);
-     obs1.setPerformer(
-        Arrays.asList(new Reference(practitionerId1), new Reference(practitionerId2)));
-     IIdType oid1 = myClient.create().resource(obs1).execute().getId().toUnqualified();
-
-     // Observation with practitioner1 and practitioner1 as the Performer -> should have the read access
-     myClient.read().resource(Observation.class).withId(oid1).execute();
-
-     Observation obs2 = new Observation();
-     obs2.setStatus(ObservationStatus.FINAL);
-     obs2.setSubject(new Reference(patientId));
-     IIdType oid2 = myClient.create().resource(obs2).execute().getId().toUnqualified();
-
-     // Observation with patient as the subject -> read access should be blocked
-     try {
-        myClient.read().resource(Observation.class).withId(oid2).execute();
-        fail();
-     } catch (ForbiddenOperationException e) {
-        // good
-     }
-  }
-
   /**
   * See #667
   */
@@ -455,13 +397,11 @@ public class AuthorizationInterceptorResourceProviderR4Test extends BaseResource
      });
 
-
      // Create a bundle that will be used as a transaction
      Bundle bundle = new Bundle();
      bundle.setType(Bundle.BundleType.TRANSACTION);
 
-
      String encounterId = "123-123";
      String encounterSystem = "http://our.internal.code.system/encounter";
      Encounter encounter = new Encounter();
@@ -523,8 +463,117 @@ public class AuthorizationInterceptorResourceProviderR4Test extends BaseResource
 
   }
 
+  @Test
+  public void testPatchWithinCompartment() {
+     Patient pt1 = new Patient();
+     pt1.setActive(true);
+     final IIdType pid1 = myClient.create().resource(pt1).execute().getId().toUnqualifiedVersionless();
+
+     Observation obs1 = new Observation();
+     obs1.setStatus(ObservationStatus.FINAL);
+     obs1.setSubject(new Reference(pid1));
+     IIdType oid1 = myClient.create().resource(obs1).execute().getId().toUnqualified();
+
+     Patient pt2 = new Patient();
+     pt2.setActive(false);
+     final IIdType pid2 = myClient.create().resource(pt2).execute().getId().toUnqualifiedVersionless();
+
+     Observation obs2 = new Observation();
+     obs2.setStatus(ObservationStatus.FINAL);
+     obs2.setSubject(new Reference(pid2));
+     IIdType oid2 = myClient.create().resource(obs2).execute().getId().toUnqualified();
+
+     ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
+        @Override
+        public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
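+           // A patch needs two grants: permission for the PATCH operation itself, plus write
+           // permission on whatever the patch actually modifies (comment added for clarity).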
+           return new RuleBuilder()
+              .allow().patch().allRequests().andThen()
+              .allow().write().allResources().inCompartment("Patient", pid1).andThen()
+              .allow().read().allResources().withAnyId().andThen()
+              .build();
+        }
+     });
+
+     String patchBody = "[\n" +
+        " { \"op\": \"replace\", \"path\": \"Observation/status\", \"value\": \"amended\" }\n" +
+        " ]";
+
+     // Allowed
+     myClient.patch().withBody(patchBody).withId(oid1).execute();
+     obs1 = myClient.read().resource(Observation.class).withId(oid1.toUnqualifiedVersionless()).execute();
+     assertEquals(ObservationStatus.AMENDED, obs1.getStatus());
+
+     // Denied
+     try {
+        myClient.patch().withBody(patchBody).withId(oid2).execute();
+        fail();
+     } catch (ForbiddenOperationException e) {
+        // good
+     }
+     obs2 = myClient.read().resource(Observation.class).withId(oid2.toUnqualifiedVersionless()).execute();
+     assertEquals(ObservationStatus.FINAL, obs2.getStatus());
+  }
+
+  /**
+   * See #778
+   */
+  @Test
+  public void testReadingObservationAccessRight() {
+     Practitioner practitioner1 = new Practitioner();
+     final IIdType practitionerId1 = myClient.create().resource(practitioner1).execute().getId().toUnqualifiedVersionless();
+
+     Practitioner practitioner2 = new Practitioner();
+     final IIdType practitionerId2 = myClient.create().resource(practitioner2).execute().getId().toUnqualifiedVersionless();
+
+     Patient patient = new Patient();
+     patient.setActive(true);
+     final IIdType patientId = myClient.create().resource(patient).execute().getId().toUnqualifiedVersionless();
+
+     ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
+        @Override
+        public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
+           // allow write all Observation resource
+           // allow read only Observation resource in which it has a practitioner1 or practitioner2 compartment
+           return new RuleBuilder().allow()
+              .write()
+              .resourcesOfType(Observation.class)
+              .withAnyId()
+              .andThen()
+              .allow()
+              .read()
+              .resourcesOfType(Observation.class)
+              .inCompartment("Practitioner", Arrays.asList(practitionerId1, practitionerId2))
+              .andThen()
+              .denyAll()
+              .build();
+        }
+     });
+
+     Observation obs1 = new Observation();
+     obs1.setStatus(ObservationStatus.FINAL);
+     obs1.setPerformer(
+        Arrays.asList(new Reference(practitionerId1), new Reference(practitionerId2)));
+     IIdType oid1 = myClient.create().resource(obs1).execute().getId().toUnqualified();
+
+     // Observation with practitioner1 and practitioner1 as the Performer -> should have the read access
+     myClient.read().resource(Observation.class).withId(oid1).execute();
+
+     Observation obs2 = new Observation();
+     obs2.setStatus(ObservationStatus.FINAL);
+     obs2.setSubject(new Reference(patientId));
+     IIdType oid2 = myClient.create().resource(obs2).execute().getId().toUnqualified();
+
+     // Observation with patient as the subject -> read access should be blocked
+     try {
+        myClient.read().resource(Observation.class).withId(oid2).execute();
+        fail();
+     } catch (ForbiddenOperationException e) {
+        // good
+     }
+  }
+
   private void unregisterInterceptors() {
-     for (IServerInterceptor next : new ArrayList<IServerInterceptor>(ourRestServer.getInterceptors())) {
+     for (IServerInterceptor next : new ArrayList<>(ourRestServer.getInterceptors())) {
        if (next instanceof AuthorizationInterceptor) {
           ourRestServer.unregisterInterceptor(next);
        }
@@ -2,19 +2,27 @@ package ca.uhn.fhir.jpa.provider.r4;
 
 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
 import ca.uhn.fhir.jpa.util.ExpungeOptions;
 import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.util.TestUtil;
+import org.hamcrest.Matchers;
 import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.IdType;
 import org.hl7.fhir.r4.model.Observation;
 import org.hl7.fhir.r4.model.Patient;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.*;
 
 public class ExpungeR4Test extends BaseResourceProviderR4Test {
@ -58,11 +66,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
getDao(theId).read(theId);
|
getDao(theId).read(theId);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
@Before
|
|
||||||
public void before() throws Exception {
|
|
||||||
super.before();
|
|
||||||
|
|
||||||
|
public void createStandardPatients() {
|
||||||
Patient p = new Patient();
|
Patient p = new Patient();
|
||||||
p.setId("PT-ONEVERSION");
|
p.setId("PT-ONEVERSION");
|
||||||
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
|
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
|
||||||
|
@ -105,7 +110,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
o.setStatus(Observation.ObservationStatus.FINAL);
|
o.setStatus(Observation.ObservationStatus.FINAL);
|
||||||
myDeletedObservationId = myObservationDao.create(o).getId();
|
myDeletedObservationId = myObservationDao.create(o).getId();
|
||||||
myDeletedObservationId = myObservationDao.delete(myDeletedObservationId).getId();
|
myDeletedObservationId = myObservationDao.delete(myDeletedObservationId).getId();
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private IFhirResourceDao<?> getDao(IIdType theId) {
|
private IFhirResourceDao<?> getDao(IIdType theId) {
|
||||||
|
@ -126,6 +130,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpungeInstanceOldVersionsAndDeleted() {
|
public void testExpungeInstanceOldVersionsAndDeleted() {
|
||||||
|
createStandardPatients();
|
||||||
|
|
||||||
Patient p = new Patient();
|
Patient p = new Patient();
|
||||||
p.setId("PT-TWOVERSION");
|
p.setId("PT-TWOVERSION");
|
||||||
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
|
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
|
||||||
|
@ -151,8 +157,35 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
assertGone(myDeletedObservationId);
|
assertGone(myDeletedObservationId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testExpungeAllVersionsDeletesRow() {
|
||||||
|
// Create then delete
|
||||||
|
Patient p = new Patient();
|
||||||
|
p.setId("TEST");
|
||||||
|
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
|
||||||
|
p.setActive(true);
|
||||||
|
p.addName().setFamily("FOO");
|
||||||
|
myPatientDao.update(p).getId();
|
||||||
|
myPatientDao.delete(new IdType("Patient/TEST"));
|
||||||
|
|
||||||
|
runInTransaction(()-> assertThat(myResourceTableDao.findAll(), not(empty())));
|
||||||
|
runInTransaction(()-> assertThat(myResourceHistoryTableDao.findAll(), not(empty())));
|
||||||
|
runInTransaction(()-> assertThat(myForcedIdDao.findAll(), not(empty())));
|
||||||
|
|
||||||
|
myPatientDao.expunge(new ExpungeOptions()
|
||||||
|
.setExpungeDeletedResources(true)
|
||||||
|
.setExpungeOldVersions(true));
|
||||||
|
|
||||||
|
runInTransaction(()-> assertThat(myResourceTableDao.findAll(), empty()));
|
||||||
|
runInTransaction(()-> assertThat(myResourceHistoryTableDao.findAll(), empty()));
|
||||||
|
runInTransaction(()-> assertThat(myForcedIdDao.findAll(), empty()));
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpungeInstanceVersionCurrentVersion() {
|
public void testExpungeInstanceVersionCurrentVersion() {
|
||||||
|
createStandardPatients();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
myPatientDao.expunge(myTwoVersionPatientId.withVersion("2"), new ExpungeOptions()
|
myPatientDao.expunge(myTwoVersionPatientId.withVersion("2"), new ExpungeOptions()
|
||||||
|
@ -166,6 +199,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpungeInstanceVersionOldVersionsAndDeleted() {
|
public void testExpungeInstanceVersionOldVersionsAndDeleted() {
|
||||||
|
createStandardPatients();
|
||||||
|
|
||||||
Patient p = new Patient();
|
Patient p = new Patient();
|
||||||
p.setId("PT-TWOVERSION");
|
p.setId("PT-TWOVERSION");
|
||||||
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
|
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
|
||||||
|
@ -193,6 +228,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpungeSystemOldVersionsAndDeleted() {
|
public void testExpungeSystemOldVersionsAndDeleted() {
|
||||||
|
createStandardPatients();
|
||||||
|
|
||||||
mySystemDao.expunge(new ExpungeOptions()
|
mySystemDao.expunge(new ExpungeOptions()
|
||||||
.setExpungeDeletedResources(true)
|
.setExpungeDeletedResources(true)
|
||||||
.setExpungeOldVersions(true));
|
.setExpungeOldVersions(true));
|
||||||
|
@ -212,6 +249,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpungeTypeDeletedResources() {
|
public void testExpungeTypeDeletedResources() {
|
||||||
|
createStandardPatients();
|
||||||
|
|
||||||
myPatientDao.expunge(new ExpungeOptions()
|
myPatientDao.expunge(new ExpungeOptions()
|
||||||
.setExpungeDeletedResources(true)
|
.setExpungeDeletedResources(true)
|
||||||
.setExpungeOldVersions(false));
|
.setExpungeOldVersions(false));
|
||||||
|
@ -231,6 +270,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpungeTypeOldVersions() {
|
public void testExpungeTypeOldVersions() {
|
||||||
|
createStandardPatients();
|
||||||
|
|
||||||
myPatientDao.expunge(new ExpungeOptions()
|
myPatientDao.expunge(new ExpungeOptions()
|
||||||
.setExpungeDeletedResources(false)
|
.setExpungeDeletedResources(false)
|
||||||
.setExpungeOldVersions(true));
|
.setExpungeOldVersions(true));
|
||||||
|
@ -251,6 +292,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpungeSystemEverything() {
|
public void testExpungeSystemEverything() {
|
||||||
|
createStandardPatients();
|
||||||
|
|
||||||
mySystemDao.expunge(new ExpungeOptions()
|
mySystemDao.expunge(new ExpungeOptions()
|
||||||
.setExpungeEverything(true));
|
.setExpungeEverything(true));
|
||||||
|
|
||||||
|
@ -270,6 +313,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testExpungeTypeOldVersionsAndDeleted() {
|
public void testExpungeTypeOldVersionsAndDeleted() {
|
||||||
|
createStandardPatients();
|
||||||
|
|
||||||
myPatientDao.expunge(new ExpungeOptions()
|
myPatientDao.expunge(new ExpungeOptions()
|
||||||
.setExpungeDeletedResources(true)
|
.setExpungeDeletedResources(true)
|
||||||
.setExpungeOldVersions(true));
|
.setExpungeOldVersions(true));
|
||||||
|
|
|
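Read together, these hunks exercise each ExpungeOptions switch separately. A compact recap of the API surface they rely on, as a sketch reusing the test class's DAO fields (the option combinations mirror the tests above):

	// Type-level: physically delete rows for previously deleted resources only
	myPatientDao.expunge(new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(false));

	// Type-level: physically delete non-current versions, keeping current ones
	myPatientDao.expunge(new ExpungeOptions().setExpungeDeletedResources(false).setExpungeOldVersions(true));

	// Instance/version-level: target a single version of one resource
	myPatientDao.expunge(myTwoVersionPatientId.withVersion("2"), new ExpungeOptions().setExpungeOldVersions(true));

	// System-level: drop everything across all resource types
	mySystemDao.expunge(new ExpungeOptions().setExpungeEverything(true));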
@@ -3571,14 +3571,11 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {

 	private void testSearchWithEmptyParameter(String url) throws IOException {
 		HttpGet get = new HttpGet(ourServerBase + url);
-		CloseableHttpResponse resp = ourHttpClient.execute(get);
-		try {
+		try (CloseableHttpResponse resp = ourHttpClient.execute(get)) {
 			assertEquals(200, resp.getStatusLine().getStatusCode());
 			String respString = IOUtils.toString(resp.getEntity().getContent(), Constants.CHARSET_UTF8);
 			Bundle bundle = myFhirCtx.newXmlParser().parseResource(Bundle.class, respString);
 			assertEquals(1, bundle.getEntry().size());
-		} finally {
-			IOUtils.closeQuietly(resp.getEntity().getContent());
 		}
 	}
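This hunk replaces a manual finally/closeQuietly block with try-with-resources, which also closes the response object itself rather than only its content stream. As a generic sketch of the idiom (any AutoCloseable works the same way):

	HttpGet get = new HttpGet(ourServerBase + url);
	// close() runs on both the normal and the exceptional path,
	// including when an assertion inside the block throws
	try (CloseableHttpResponse resp = ourHttpClient.execute(get)) {
		assertEquals(200, resp.getStatusLine().getStatusCode());
	}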
@@ -1,5 +1,6 @@
 package ca.uhn.fhir.jpa.provider.r4;

+import ca.uhn.fhir.jpa.provider.dstu3.TerminologyUploaderProviderDstu3Test;
 import ca.uhn.fhir.jpa.term.IHapiTerminologyLoaderSvc;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.util.TestUtil;

@@ -25,19 +26,6 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes

 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TerminologyUploaderProviderR4Test.class);

-	private byte[] createLoincZip() throws IOException {
-		ByteArrayOutputStream bos = new ByteArrayOutputStream();
-		ZipOutputStream zos = new ZipOutputStream(bos);
-
-		zos.putNextEntry(new ZipEntry("loinc.csv"));
-		zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/loinc.csv")));
-		zos.putNextEntry(new ZipEntry("LOINC_2.54_MULTI-AXIAL_HIERARCHY.CSV"));
-		zos.write(IOUtils.toByteArray(getClass().getResourceAsStream("/loinc/LOINC_2.54_MULTI-AXIAL_HIERARCHY.CSV")));
-		zos.close();
-
-		byte[] packageBytes = bos.toByteArray();
-		return packageBytes;
-	}
-
 	private byte[] createSctZip() throws IOException {
 		ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -75,7 +63,7 @@ public class TerminologyUploaderProviderR4Test extends BaseResourceProviderR4Tes

 	@Test
 	public void testUploadLoinc() throws Exception {
-		byte[] packageBytes = createLoincZip();
+		byte[] packageBytes = TerminologyUploaderProviderDstu3Test.createLoincZip();

 		//@formatter:off
 		Parameters respParam = myClient
@@ -321,18 +321,18 @@ public class SearchCoordinatorSvcImplTest {
 			// ignore
 		}

-		when(mySearchResultDao.findWithSearchUuid(any(Search.class), any(Pageable.class))).thenAnswer(new Answer<Page<SearchResult>>() {
+		when(mySearchResultDao.findWithSearchUuid(any(Search.class), any(Pageable.class))).thenAnswer(new Answer<Page<Long>>() {
 			@Override
-			public Page<SearchResult> answer(InvocationOnMock theInvocation) throws Throwable {
+			public Page<Long> answer(InvocationOnMock theInvocation) throws Throwable {
 				Pageable page = (Pageable) theInvocation.getArguments()[1];

-				ArrayList<SearchResult> results = new ArrayList<SearchResult>();
+				ArrayList<Long> results = new ArrayList<Long>();
 				int max = (page.getPageNumber() * page.getPageSize()) + page.getPageSize();
 				for (long i = page.getOffset(); i < max; i++) {
-					results.add(new SearchResult().setResourcePid(i + 10L));
+					results.add(i + 10L);
 				}

-				return new PageImpl<SearchResult>(results);
+				return new PageImpl<Long>(results);
 			}
 		});
 		search.setStatus(SearchStatusEnum.FINISHED);
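Since Mockito's Answer is a functional interface, the same stub can be written more compactly with a lambda. A sketch under the same assumptions (mySearchResultDao mock, Pageable as the second argument; java.util.stream.LongStream and Collectors are standard JDK):

	when(mySearchResultDao.findWithSearchUuid(any(Search.class), any(Pageable.class))).thenAnswer(t -> {
		// Synthesize one PID per requested row, offset by 10, for the requested page
		Pageable page = (Pageable) t.getArguments()[1];
		List<Long> pids = LongStream.range(page.getOffset(), page.getOffset() + page.getPageSize())
			.map(i -> i + 10L)
			.boxed()
			.collect(Collectors.toList());
		return new PageImpl<>(pids);
	});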
@@ -0,0 +1,69 @@
+package ca.uhn.fhir.jpa.stresstest;
+
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.jpa.dao.DaoConfig;
+import ca.uhn.fhir.jpa.dao.ISearchParamRegistry;
+import ca.uhn.fhir.jpa.dao.dstu3.SearchParamExtractorDstu3;
+import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
+import ca.uhn.fhir.jpa.entity.ResourceTable;
+import ca.uhn.fhir.util.StopWatch;
+import org.hl7.fhir.dstu3.hapi.ctx.IValidationSupport;
+import org.hl7.fhir.dstu3.hapi.validation.CachingValidationSupport;
+import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport;
+import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain;
+import org.hl7.fhir.dstu3.model.Patient;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.junit.Assert.*;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.*;
+
+public class IndexStressTest {
+
+	private static final Logger ourLog = LoggerFactory.getLogger(IndexStressTest.class);
+
+	@Test
+	public void testExtractSearchParams() {
+		Patient p = new Patient();
+		p.addName().setFamily("FOO").addGiven("BAR").addGiven("BAR");
+		p.getMaritalStatus().setText("DDDDD");
+		p.addAddress().addLine("A").addLine("B").addLine("C");
+
+		DaoConfig daoConfig = new DaoConfig();
+		FhirContext ctx = FhirContext.forDstu3();
+		IValidationSupport mockValidationSupport = mock(IValidationSupport.class);
+		IValidationSupport validationSupport = new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), mockValidationSupport));
+		ISearchParamRegistry searchParamRegistry = mock(ISearchParamRegistry.class);
+		SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(daoConfig, ctx, validationSupport, searchParamRegistry);
+		extractor.start();
+
+		Map<String, RuntimeSearchParam> spMap = ctx
+			.getResourceDefinition("Patient")
+			.getSearchParams()
+			.stream()
+			.collect(Collectors.toMap(RuntimeSearchParam::getName, t -> t));
+		when(searchParamRegistry.getActiveSearchParams(eq("Patient"))).thenReturn(spMap);
+
+		ResourceTable entity = new ResourceTable();
+		Set<ResourceIndexedSearchParamString> params = extractor.extractSearchParamStrings(entity, p);
+
+		StopWatch sw = new StopWatch();
+		int loops = 100;
+		for (int i = 0; i < loops; i++) {
+			entity = new ResourceTable();
+			params = extractor.extractSearchParamStrings(entity, p);
+		}
+
+		ourLog.info("Indexed {} times in {}ms/time", loops, sw.getMillisPerOperation(loops));
+
+		assertEquals(9, params.size());
+		verify(mockValidationSupport, times(1)).fetchAllStructureDefinitions((any(FhirContext.class)));
+	}
+}
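The final verify is the interesting assertion in this new test: the extractor runs 101 times, yet CachingValidationSupport should reach the underlying IValidationSupport only once. The same property in isolation, as a sketch with a fresh mock and the ctx from the test above:

	IValidationSupport delegate = mock(IValidationSupport.class);
	IValidationSupport cached = new CachingValidationSupport(new ValidationSupportChain(new DefaultProfileValidationSupport(), delegate));
	// Two calls through the cache...
	cached.fetchAllStructureDefinitions(ctx);
	cached.fetchAllStructureDefinitions(ctx);
	// ...but the wrapped delegate is consulted exactly once
	verify(delegate, times(1)).fetchAllStructureDefinitions(any(FhirContext.class));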
@@ -48,7 +48,6 @@ public class StressTestDstu3Test extends BaseResourceProviderDstu3Test {
 		myRequestValidatingInterceptor.addValidatorModule(module);
 	}

-
 	@Test
 	public void testMultithreadedSearch() throws Exception {
 		Bundle input = new Bundle();
@@ -67,7 +67,6 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
 	public void testExpandWithPropertyCoding() throws Exception {
 		ZipCollectionBuilder files = new ZipCollectionBuilder();
 		TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
-		TerminologyLoaderSvcLoincTest.addLoincOptionalFilesToZip(files);
 		myLoader.loadLoinc(files.getFiles(), mySrd);

 		// Search by code

@@ -121,7 +120,6 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
 	public void testExpandWithPropertyString() throws Exception {
 		ZipCollectionBuilder files = new ZipCollectionBuilder();
 		TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
-		TerminologyLoaderSvcLoincTest.addLoincOptionalFilesToZip(files);
 		myLoader.loadLoinc(files.getFiles(), mySrd);

 		ValueSet input = new ValueSet();

@@ -144,7 +142,6 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {
 	public void testLookupWithProperties() throws Exception {
 		ZipCollectionBuilder files = new ZipCollectionBuilder();
 		TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
-		TerminologyLoaderSvcLoincTest.addLoincOptionalFilesToZip(files);
 		myLoader.loadLoinc(files.getFiles(), mySrd);

 		IFhirResourceDaoCodeSystem.LookupCodeResult result = myCodeSystemDao.lookupCode(new StringType("10013-1"), new StringType(IHapiTerminologyLoaderSvc.LOINC_URI), null, mySrd);

@@ -172,11 +169,31 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {

 	}

+	@Test
+	public void testLookupWithProperties2() throws Exception {
+		ZipCollectionBuilder files = new ZipCollectionBuilder();
+		TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
+		myLoader.loadLoinc(files.getFiles(), mySrd);
+
+		IFhirResourceDaoCodeSystem.LookupCodeResult result = myCodeSystemDao.lookupCode(new StringType("17788-1"), new StringType(IHapiTerminologyLoaderSvc.LOINC_URI), null, mySrd);
+		org.hl7.fhir.r4.model.Parameters parametersR4 = result.toParameters(null);
+		Parameters parameters = VersionConvertor_30_40.convertParameters(parametersR4);
+
+		ourLog.info(myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(parameters));
+
+		Optional<Coding> propertyValue = findProperty(parameters, "COMPONENT");
+		assertTrue(propertyValue.isPresent());
+		assertEquals(IHapiTerminologyLoaderSvc.LOINC_URI, propertyValue.get().getSystem());
+		assertEquals("LP19258-0", propertyValue.get().getCode());
+		assertEquals("Qn", propertyValue.get().getDisplay());
+	}
+
 	@Test
 	public void testLookupWithPropertiesExplicit() throws Exception {
 		ZipCollectionBuilder files = new ZipCollectionBuilder();
 		TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesToZip(files);
-		TerminologyLoaderSvcLoincTest.addLoincOptionalFilesToZip(files);
 		myLoader.loadLoinc(files.getFiles(), mySrd);

 		IFhirResourceDaoCodeSystem.LookupCodeResult result = myCodeSystemDao.lookupCode(new StringType("10013-1"), new StringType(IHapiTerminologyLoaderSvc.LOINC_URI), null, mySrd);
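The new lookup test stays on the R4 model internally and converts to DSTU3 only for its assertions. VersionConvertor_30_40 provides static converters in both directions; a sketch of the round trip, reusing parametersR4 from the test above inside a method that declares throws Exception (the reverse overload is an assumption about the same converter class):

	// R4 -> DSTU3, as the test does
	org.hl7.fhir.dstu3.model.Parameters dstu3 = VersionConvertor_30_40.convertParameters(parametersR4);
	// DSTU3 -> R4, the inverse overload
	org.hl7.fhir.r4.model.Parameters backToR4 = VersionConvertor_30_40.convertParameters(dstu3);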
@@ -13,12 +13,13 @@ import org.hl7.fhir.r4.model.Enumerations;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.junit.AfterClass;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Captor;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;

 import java.io.IOException;
 import java.util.HashMap;

@@ -26,9 +27,10 @@ import java.util.List;
 import java.util.Map;

 import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.empty;
 import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;

@@ -92,7 +94,6 @@ public class TerminologyLoaderSvcLoincTest {
 	@Test
 	public void testLoadLoinc() throws Exception {
 		addLoincMandatoryFilesToZip(myFiles);
-		addLoincOptionalFilesToZip(myFiles);

 		// Actually do the load
 		mySvc.loadLoinc(myFiles.getFiles(), details);

@@ -116,6 +117,13 @@ public class TerminologyLoaderSvcLoincTest {
 		assertEquals("EKG.MEAS", code.getStringProperty("CLASS"));
 		assertEquals("R' wave amplitude in lead I", code.getDisplay());

+		// Code with component that has a divisor
+		code = concepts.get("17788-1");
+		assertEquals("17788-1", code.getCode());
+		assertEquals(1, code.getCodingProperties("COMPONENT").size());
+		assertEquals("http://loinc.org", code.getCodingProperties("COMPONENT").get(0).getSystem());
+		assertEquals("LP19258-0", code.getCodingProperties("COMPONENT").get(0).getCode());
+
 		// Loinc code with answer
 		code = concepts.get("61438-8");
 		assertThat(code.getStringProperties("answer-list"), contains("LL1000-0"));

@@ -221,9 +229,9 @@ public class TerminologyLoaderSvcLoincTest {
 		assertEquals("NM", code.getCodingProperties(propertyName).get(0).getDisplay());

 		// RSNA Playbook - LOINC Part -> RadLex RID Mappings
-		conceptMap = conceptMaps.get(LoincRsnaPlaybookHandler.RID_MAPPING_CM_ID);
-		assertEquals(LoincRsnaPlaybookHandler.RID_MAPPING_CM_URI, conceptMap.getUrl());
-		assertEquals(LoincRsnaPlaybookHandler.RID_MAPPING_CM_NAME, conceptMap.getName());
+		conceptMap = conceptMaps.get(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_ID);
+		assertEquals(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_URI, conceptMap.getUrl());
+		assertEquals(LoincPartRelatedCodeMappingHandler.LOINC_PART_TO_RID_PART_MAP_NAME, conceptMap.getName());
 		assertEquals(1, conceptMap.getGroup().size());
 		group = conceptMap.getGroupFirstRep();
 		// all entries have the same source and target so these should be null

@@ -237,9 +245,9 @@ public class TerminologyLoaderSvcLoincTest {
 		assertEquals(Enumerations.ConceptMapEquivalence.EQUAL, group.getElement().get(0).getTarget().get(0).getEquivalence());

 		// RSNA Playbook - LOINC Term -> RadLex RPID Mappings
-		conceptMap = conceptMaps.get(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_ID);
-		assertEquals(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_URI, conceptMap.getUrl());
-		assertEquals(LoincRsnaPlaybookHandler.RPID_MAPPING_CM_NAME, conceptMap.getName());
+		conceptMap = conceptMaps.get(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_ID);
+		assertEquals(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_URI, conceptMap.getUrl());
+		assertEquals(LoincPartRelatedCodeMappingHandler.LOINC_TERM_TO_RPID_PART_MAP_NAME, conceptMap.getName());
 		assertEquals(1, conceptMap.getGroup().size());
 		group = conceptMap.getGroupFirstRep();
 		// all entries have the same source and target so these should be null

@@ -286,7 +294,7 @@ public class TerminologyLoaderSvcLoincTest {

 		// IEEE Medical Device Codes
 		conceptMap = conceptMaps.get(LoincIeeeMedicalDeviceCodeHandler.LOINC_IEEE_CM_ID);
-		ourLog.info(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
+		ourLog.debug(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(conceptMap));
 		assertEquals(LoincIeeeMedicalDeviceCodeHandler.LOINC_IEEE_CM_NAME, conceptMap.getName());
 		assertEquals(LoincIeeeMedicalDeviceCodeHandler.LOINC_IEEE_CM_URI, conceptMap.getUrl());
 		assertEquals(1, conceptMap.getGroup().size());

@@ -308,9 +316,29 @@ public class TerminologyLoaderSvcLoincTest {
 		assertEquals(9, vs.getCompose().getInclude().get(0).getConcept().size());
 		assertEquals("11525-3", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
 		assertEquals("US Pelvis Fetus for pregnancy", vs.getCompose().getInclude().get(0).getConcept().get(0).getDisplay());

+		// Group - Parent
+		vs = valueSets.get("LG100-4");
+		ourLog.info(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(vs));
+		assertEquals("Chem_DrugTox_Chal_Sero_Allergy<SAME:Comp|Prop|Tm|Syst (except intravascular and urine)><ANYBldSerPlas,ANYUrineUrineSed><ROLLUP:Method>", vs.getName());
+		assertEquals("http://loinc.org/vs/LG100-4", vs.getUrl());
+		assertEquals(1, vs.getCompose().getInclude().size());
+		assertEquals(1, vs.getCompose().getInclude().get(0).getValueSet().size());
+		assertEquals("http://loinc.org/vs/LG1695-8", vs.getCompose().getInclude().get(0).getValueSet().get(0).getValueAsString());
+
+		// Group - Child
+		vs = valueSets.get("LG1695-8");
+		ourLog.info(FhirContext.forR4().newXmlParser().setPrettyPrint(true).encodeResourceToString(vs));
+		assertEquals("1,4-Dichlorobenzene|MCnc|Pt|ANYBldSerPl", vs.getName());
+		assertEquals("http://loinc.org/vs/LG1695-8", vs.getUrl());
+		assertEquals(1, vs.getCompose().getInclude().size());
+		assertEquals(2, vs.getCompose().getInclude().get(0).getConcept().size());
+		assertEquals("17424-3", vs.getCompose().getInclude().get(0).getConcept().get(0).getCode());
+		assertEquals("13006-2", vs.getCompose().getInclude().get(0).getConcept().get(1).getCode());
 	}

 	@Test
+	@Ignore
 	public void testLoadLoincMandatoryFilesOnly() throws IOException {
 		addLoincMandatoryFilesToZip(myFiles);

@@ -334,26 +362,29 @@ public class TerminologyLoaderSvcLoincTest {

 	@Test
 	public void testLoadLoincMissingMandatoryFiles() throws IOException {
-		addLoincOptionalFilesToZip(myFiles);
+		myFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE);
+		myFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);

 		// Actually do the load
 		try {
 			mySvc.loadLoinc(myFiles.getFiles(), details);
 			fail();
 		} catch (UnprocessableEntityException e) {
-			assertEquals("Could not find the following mandatory files in input: [loinc.csv, MULTI-AXIAL_HIERARCHY.CSV]", e.getMessage());
+			assertThat(e.getMessage(), containsString("Could not find the following mandatory files in input:"));
+			assertThat(e.getMessage(), containsString("Loinc.csv"));
+			assertThat(e.getMessage(), containsString("MultiAxialHierarchy.csv"));
 		}
 	}

-	static void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles) throws IOException {
-		theFiles.addFileZip("/loinc/", "loinc.csv", TerminologyLoaderSvcImpl.LOINC_FILE);
-		theFiles.addFileZip("/loinc/", "hierarchy.csv", TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
-	}
-
-	static void addLoincOptionalFilesToZip(ZipCollectionBuilder theFiles) throws IOException {
-		theFiles.addFileZip("/loinc/", "loincupload.properties");
-		theFiles.addFileZip("/loinc/", "AnswerList_Beta_1.csv", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
+	public static void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles) throws IOException {
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UPLOAD_PROPERTIES_FILE);
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_FILE);
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_GROUP_TERMS_FILE);
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PARENT_GROUP_FILE);
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_FILE, TerminologyLoaderSvcImpl.LOINC_FILE);
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE, TerminologyLoaderSvcImpl.LOINC_HIERARCHY_FILE);
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_FILE);
 		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_ANSWERLIST_LINK_FILE);
 		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_FILE, TerminologyLoaderSvcImpl.LOINC_PART_FILE);
 		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE, TerminologyLoaderSvcImpl.LOINC_PART_LINK_FILE);

@@ -363,13 +394,8 @@ public class TerminologyLoaderSvcLoincTest {
 		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE);
 		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_CSV);
 		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_IMAGING_DOCUMENT_CODES_FILE);
-		/*
-		 * Top 2000 files have versions in the filename so don't use the
-		 * constant.. that way this is a better test
-		 */
-		theFiles.addFilePlain("/loinc/", "LOINC_1.6_Top2000CommonLabResultsSI.csv");
-		theFiles.addFilePlain("/loinc/", "LOINC_1.6_Top2000CommonLabResultsUS.csv");
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE);
+		theFiles.addFileZip("/loinc/", TerminologyLoaderSvcImpl.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE);
 	}

 	@AfterClass
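A side note on the Group assertions added in testLoadLoinc above: a parent group points at its child group through compose.include.valueSet, while the child group enumerates its member codes through compose.include.concept. A sketch of building that shape by hand with the R4 model classes (URLs and codes taken from the assertions above):

	ValueSet parent = new ValueSet();
	parent.setUrl("http://loinc.org/vs/LG100-4");
	// Parent group: pulls in the child value set by its canonical URL
	parent.getCompose().addInclude().addValueSet("http://loinc.org/vs/LG1695-8");

	ValueSet child = new ValueSet();
	child.setUrl("http://loinc.org/vs/LG1695-8");
	// Child group: enumerates the member LOINC codes directly
	ValueSet.ConceptSetComponent include = child.getCompose().addInclude().setSystem("http://loinc.org");
	include.addConcept().setCode("17424-3");
	include.addConcept().setCode("13006-2");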