Fix #674 - Avoid duplicates in $everything query
Squashed commit of the following:

commit f3097f423f
Author: James Agnew <jamesagnew@gmail.com>
Date:   Mon Jun 19 13:24:29 2017 -0400

    more travis fun

commit a4b8161597
Author: James Agnew <jamesagnew@gmail.com>
Date:   Mon Jun 19 10:43:33 2017 -0400

    More fighting with travis

commit fe47d1e864
Author: James Agnew <jamesagnew@gmail.com>
Date:   Mon Jun 19 10:10:55 2017 -0400

    More travis attempts

commit 4fdfe7a4e8
Author: James Agnew <jamesagnew@gmail.com>
Date:   Mon Jun 19 09:25:04 2017 -0400

    Try and run unit tests in 2 threads to cut time.. Will travis like this?

commit 571045b63d
Author: James <jamesagnew@gmail.com>
Date:   Mon Jun 19 07:35:46 2017 -0400

    Paging now working

commit 526a1fa7d0
Merge: cebe881a15 55a67ae055
Author: James Agnew <jamesagnew@gmail.com>
Date:   Mon Jun 19 06:19:37 2017 -0400

    Merge branch '674_everything_improvements' of github.com:jamesagnew/hapi-fhir into 674_everything_improvements

commit cebe881a15
Merge: b3b9273ca7 5789cd2a46
Author: James Agnew <jamesagnew@gmail.com>
Date:   Mon Jun 19 06:19:12 2017 -0400

    Merge branch 'master' into 674_everything_improvements for #674

commit b3b9273ca7
Author: James Agnew <jamesagnew@gmail.com>
Date:   Mon Jun 19 06:16:27 2017 -0400

    Work on everything fixes for #674

commit 55a67ae055
Author: James Agnew <jamesagnew@gmail.com>
Date:   Mon Jun 19 06:16:27 2017 -0400

    Work on everything fixes
parent f5a3ad6751
commit e147cf321d
@@ -1,5 +1,6 @@
# Use docker-based build environment (instead of openvz)
sudo: false
#sudo: false
sudo: required

language: java
jdk:

@@ -21,4 +22,4 @@ before_script:
script:
# - mvn -e -B clean install && cd hapi-fhir-ra && mvn -e -B -DTRAVIS_JOB_ID=$TRAVIS_JOB_ID clean test jacoco:report coveralls:report
# - mvn -Dci=true -e -B -P ALLMODULES,NOPARALLEL,ERRORPRONE clean install && cd hapi-fhir-jacoco && mvn -e -B -DTRAVIS_JOB_ID=$TRAVIS_JOB_ID jacoco:report coveralls:report
- mvn -Dci=true -e -B -P ALLMODULES,NOPARALLEL clean install && cd hapi-fhir-jacoco && mvn -e -B -DTRAVIS_JOB_ID=$TRAVIS_JOB_ID jacoco:report coveralls:report
- mvn -Dci=true -e -B -P ALLMODULES,MINPARALLEL clean install && cd hapi-fhir-jacoco && mvn -e -B -DTRAVIS_JOB_ID=$TRAVIS_JOB_ID jacoco:report coveralls:report
@@ -632,13 +632,21 @@
			</plugins>
		</build>
	</profile>
	<!-- <profile> <id>DIST</id> <build> <plugins> <plugin> <groupId>de.juplo</groupId> <artifactId>hibernate4-maven-plugin</artifactId> <configuration> <force>true</force> <target>SCRIPT</target> <skip>${skip-hib4}</skip>
		</configuration> <dependencies> <dependency> <groupId>org.hibernate</groupId> <artifactId>hibernate-core</artifactId> <version>${hibernate_version}</version> </dependency> </dependencies> <executions>
		<execution> <id>o10g</id> <goals> <goal>export</goal> </goals> <phase>test</phase> <configuration> <hibernateDialect>org.hibernate.dialect.Oracle10gDialect</hibernateDialect> <outputFile>${project.build.directory}/schema_oracle_10g.sql</outputFile>
		</configuration> </execution> <execution> <id>derby</id> <goals> <goal>export</goal> </goals> <phase>test</phase> <configuration> <hibernateDialect>org.hibernate.dialect.DerbyTenSevenDialect</hibernateDialect>
		<outputFile>${project.build.directory}/schema_derby.sql</outputFile> </configuration> </execution> <execution> <id>hsql</id> <goals> <goal>export</goal> </goals> <phase>test</phase> <configuration> <hibernateDialect>org.hibernate.dialect.HSQLDialect</hibernateDialect>
		<outputFile>${project.build.directory}/schema_hsql.sql</outputFile> </configuration> </execution> <execution> <id>mysql5</id> <goals> <goal>export</goal> </goals> <phase>test</phase> <configuration> <hibernateDialect>org.hibernate.dialect.MySQL5Dialect</hibernateDialect>
		<outputFile>${project.build.directory}/schema_mysql_5.sql</outputFile> </configuration> </execution> </executions> </plugin> </plugins> </build> </profile> -->
	</profiles>
	<profile>
		<id>MINPARALLEL</id>
		<build>
			<plugins>
				<plugin>
					<groupId>org.apache.maven.plugins</groupId>
					<artifactId>maven-surefire-plugin</artifactId>
					<configuration>
						<forkCount>2</forkCount>
						<reuseForks>true</reuseForks>
					</configuration>
				</plugin>
			</plugins>
		</build>
	</profile>
	</profiles>

</project>
@@ -170,6 +170,7 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
		return retVal;
	}

	@Transactional(propagation = Propagation.REQUIRED, readOnly=true)
	@Override
	public Map<String, Long> getResourceCounts() {
		CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
@@ -87,15 +87,16 @@ public class DaoConfig {

	private boolean myEnforceReferentialIntegrityOnWrite = true;

	private int myEverythingIncludesFetchPageSize = 50;
	/**
	 * update setter javadoc if default changes
	 */
	private long myExpireSearchResultsAfterMillis = DateUtils.MILLIS_PER_HOUR;

	/**
	 * update setter javadoc if default changes
	 */
	private Integer myFetchSizeDefaultMaximum = null;

	private int myHardTagListLimit = 1000;
	private int myIncludeLimit = 2000;
	/**
@@ -147,6 +148,22 @@ public class DaoConfig {
		return myDeferIndexingForCodesystemsOfSize;
	}

	/**
	 * Unlike with normal search queries, $everything queries have their _includes loaded by the main search thread and these included results
	 * are added to the normal search results instead of being added on as extras in a page. This means that they will not appear multiple times
	 * as the search results are paged over.
	 * <p>
	 * In order to recursively load _includes, we process the original results in batches of this size. Adjust with caution, increasing this
	 * value may improve performance but may also cause memory issues.
	 * </p>
	 * <p>
	 * The default value is 50
	 * </p>
	 */
	public int getEverythingIncludesFetchPageSize() {
		return myEverythingIncludesFetchPageSize;
	}

	/**
	 * Sets the number of milliseconds that search results for a given client search
	 * should be preserved before being purged from the database.
@@ -537,6 +554,23 @@ public class DaoConfig {
		myEnforceReferentialIntegrityOnWrite = theEnforceReferentialIntegrityOnWrite;
	}

	/**
	 * Unlike with normal search queries, $everything queries have their _includes loaded by the main search thread and these included results
	 * are added to the normal search results instead of being added on as extras in a page. This means that they will not appear multiple times
	 * as the search results are paged over.
	 * <p>
	 * In order to recursively load _includes, we process the original results in batches of this size. Adjust with caution, increasing this
	 * value may improve performance but may also cause memory issues.
	 * </p>
	 * <p>
	 * The default value is 50
	 * </p>
	 */
	public void setEverythingIncludesFetchPageSize(int theEverythingIncludesFetchPageSize) {
		Validate.inclusiveBetween(1, Integer.MAX_VALUE, theEverythingIncludesFetchPageSize);
		myEverythingIncludesFetchPageSize = theEverythingIncludesFetchPageSize;
	}

	/**
	 * If this is set to <code>false</code> (default is <code>true</code>) the stale search deletion
	 * task will be disabled (meaning that search results will be retained in the database indefinitely). USE WITH CAUTION.
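As a usage sketch (not part of the diff itself): the new option can be tuned wherever the JPA server's DaoConfig is constructed. The configuration class below and its name are assumptions for illustration; only the DaoConfig calls reflect what this change adds.

import ca.uhn.fhir.jpa.dao.DaoConfig;

// Hypothetical configuration class; only the DaoConfig usage below
// reflects the setting introduced in this commit.
public class JpaServerConfig {

	public DaoConfig daoConfig() {
		DaoConfig retVal = new DaoConfig();
		// Batch size used when the $everything search thread loads _includes
		// inline. The default is 50, and the setter rejects values below 1.
		retVal.setEverythingIncludesFetchPageSize(100);
		return retVal;
	}
}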
@@ -27,31 +27,86 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;

import java.math.BigDecimal;
import java.math.MathContext;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.*;
import javax.persistence.criteria.AbstractQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaBuilder.In;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.From;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Order;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.criteria.Subquery;

import org.apache.commons.lang3.*;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

import com.google.common.collect.*;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import ca.uhn.fhir.context.*;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeDeclaredChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeChildChoiceDefinition;
import ca.uhn.fhir.context.RuntimeChildResourceDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.entity.*;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.SearchParam;
import ca.uhn.fhir.jpa.entity.SearchParamPresent;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.term.IHapiTerminologySvc;
import ca.uhn.fhir.jpa.term.VersionIndependentConcept;
import ca.uhn.fhir.jpa.util.StopWatch;
import ca.uhn.fhir.model.api.*;
import ca.uhn.fhir.model.base.composite.*;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
import ca.uhn.fhir.model.base.composite.BaseQuantityDt;
import ca.uhn.fhir.model.dstu.resource.BaseResource;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
@@ -60,9 +115,23 @@ import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.method.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.param.CompositeParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.param.UriParamQualifierEnum;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.UrlUtil;

/**
@@ -70,6 +139,8 @@ import ca.uhn.fhir.util.UrlUtil;
 * searchs for resources
 */
public class SearchBuilder implements ISearchBuilder {
	private static final List<Long> EMPTY_LONG_LIST = Collections.unmodifiableList(new ArrayList<Long>());

	private static Long NO_MORE = Long.valueOf(-1);
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchBuilder.class);
	private List<Long> myAlsoIncludePids;
@@ -1048,7 +1119,7 @@ public class SearchBuilder implements ISearchBuilder {
		if (!isBlank(unitsValue)) {
			code = theBuilder.equal(theFrom.get("myUnits"), unitsValue);
		}

		cmpValue = ObjectUtils.defaultIfNull(cmpValue, ParamPrefixEnum.EQUAL);
		final Expression<BigDecimal> path = theFrom.get("myValue");
		String invalidMessageName = "invalidQuantityPrefix";
@@ -1357,11 +1428,11 @@ public class SearchBuilder implements ISearchBuilder {
		 * Now perform the search
		 */
		final TypedQuery<Long> query = myEntityManager.createQuery(outerQuery);

		if (theMaximumResults != null) {
			query.setMaxResults(theMaximumResults);
		}

		return query;
	}
@@ -1629,12 +1700,6 @@ public class SearchBuilder implements ISearchBuilder {
		List<ResourceLink> results = q.getResultList();
		for (ResourceLink resourceLink : results) {
			if (theReverseMode) {
				// if (theEverythingModeEnum.isEncounter()) {
				// if (resourceLink.getSourcePath().equals("Encounter.subject") ||
				// resourceLink.getSourcePath().equals("Encounter.patient")) {
				// nextRoundOmit.add(resourceLink.getSourceResourcePid());
				// }
				// }
				pidsToInclude.add(resourceLink.getSourceResourcePid());
			} else {
				pidsToInclude.add(resourceLink.getTargetResourcePid());
@@ -1745,10 +1810,10 @@ public class SearchBuilder implements ISearchBuilder {
	}

	private void searchForIdsWithAndOr(String theResourceName, String theParamName, List<List<? extends IQueryParameterType>> theAndOrParams) {

		for (int andListIdx = 0; andListIdx < theAndOrParams.size(); andListIdx++) {
			List<? extends IQueryParameterType> nextOrList = theAndOrParams.get(andListIdx);

			for (int orListIdx = 0; orListIdx < nextOrList.size(); orListIdx++) {
				IQueryParameterType nextOr = nextOrList.get(orListIdx);
				boolean hasNoValue = false;
@@ -1760,14 +1825,14 @@ public class SearchBuilder implements ISearchBuilder {
					hasNoValue = true;
				}
			}

			if (hasNoValue) {
				ourLog.debug("Ignoring empty parameter: {}", theParamName);
				nextOrList.remove(orListIdx);
				orListIdx--;
			}
		}

		if (nextOrList.isEmpty()) {
			theAndOrParams.remove(andListIdx);
			andListIdx--;
@@ -1777,7 +1842,7 @@ public class SearchBuilder implements ISearchBuilder {
		if (theAndOrParams.isEmpty()) {
			return;
		}

		if (theParamName.equals(BaseResource.SP_RES_ID)) {

			addPredicateResourceId(theAndOrParams);
@@ -1972,6 +2037,63 @@ public class SearchBuilder implements ISearchBuilder {
	static Predicate[] toArray(List<Predicate> thePredicates) {
		return thePredicates.toArray(new Predicate[thePredicates.size()]);
	}

	public class IncludesIterator implements Iterator<Long>{

		private Iterator<Long> myCurrentIterator;
		private int myCurrentOffset;
		private ArrayList<Long> myCurrentPids;
		private Long myNext;
		private int myPageSize = myCallingDao.getConfig().getEverythingIncludesFetchPageSize();

		public IncludesIterator(Set<Long> thePidSet) {
			myCurrentPids = new ArrayList<Long>(thePidSet);
			myCurrentIterator = EMPTY_LONG_LIST.iterator();
			myCurrentOffset = 0;
		}

		private void fetchNext() {
			while (myNext == null) {

				if (myCurrentIterator.hasNext()) {
					myNext = myCurrentIterator.next();
					break;
				}

				if (!myCurrentIterator.hasNext()) {
					int start = myCurrentOffset;
					int end = myCurrentOffset + myPageSize;
					if (end > myCurrentPids.size()) {
						end = myCurrentPids.size();
					}
					if (end - start <= 0) {
						myNext = NO_MORE;
						break;
					}
					myCurrentOffset = end;
					Collection<Long> pidsToScan = myCurrentPids.subList(start, end);
					Set<Include> includes = Collections.singleton(new Include("*", true));
					Set<Long> newPids = loadReverseIncludes(myCallingDao, myContext, myEntityManager, pidsToScan, includes, false, myParams.getLastUpdated());
					myCurrentIterator = newPids.iterator();
				}

			}
		}

		@Override
		public boolean hasNext() {
			fetchNext();
			return myNext != NO_MORE;
		}

		@Override
		public Long next() {
			fetchNext();
			Long retVal = myNext;
			myNext = null;
			return retVal;
		}

	}

	private enum JoinEnum {
		DATE, NUMBER, QUANTITY, REFERENCE, STRING, TOKEN, URI
@@ -2007,16 +2129,24 @@ public class SearchBuilder implements ISearchBuilder {
	}

	private final class QueryIterator implements Iterator<Long> {

		private boolean myFirst = true;
		private IncludesIterator myIncludesIterator;
		private Long myNext;
		private final Set<Long> myPidSet = new HashSet<Long>();
		private Iterator<Long> myPreResultsIterator;
		private Iterator<Long> myResultsIterator;
		private SortSpec mySort;
		private boolean myStillNeedToFetchIncludes;
		private StopWatch myStopwatch = null;

		private QueryIterator() {
			mySort = myParams.getSort();

			// Includes are processed inline for $everything query
			if (myParams.getEverythingMode() != null) {
				myStillNeedToFetchIncludes = true;
			}
		}

		private void fetchNext() {
@@ -2061,7 +2191,24 @@ public class SearchBuilder implements ISearchBuilder {
			}

			if (myNext == null) {
				myNext = NO_MORE;
				if (myStillNeedToFetchIncludes) {
					myIncludesIterator = new IncludesIterator(myPidSet);
					myStillNeedToFetchIncludes = false;
				}
				if (myIncludesIterator != null) {
					while (myIncludesIterator.hasNext()) {
						Long next = myIncludesIterator.next();
						if (next != null && myPidSet.add(next)) {
							myNext = next;
							break;
						}
					}
					if (myNext == null) {
						myNext = NO_MORE;
					}
				} else {
					myNext = NO_MORE;
				}
			}

		} // if we need to fetch the next result
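The duplicate avoidance above hinges on Set.add returning false for a pid that has already been handed out, so a resource reachable both as a match and as an _include is emitted only once. A standalone illustration of that idiom (hypothetical values, not HAPI code):

import java.util.HashSet;
import java.util.Set;

public class PidDeduplicationSketch {

	public static void main(String[] args) {
		Set<Long> pidSet = new HashSet<Long>();

		// First occurrence: add() returns true, so the pid is emitted.
		System.out.println(pidSet.add(42L)); // true

		// Duplicate occurrence (e.g. the same resource found as a match and
		// again as an _include): add() returns false and it is skipped.
		System.out.println(pidSet.add(42L)); // false
	}
}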
@@ -2070,9 +2217,11 @@ public class SearchBuilder implements ISearchBuilder {
				ourLog.info("Initial query result returned in {}ms for query {}", myStopwatch.getMillis(), mySearchUuid);
				myFirst = false;
			}

			if (myNext == NO_MORE) {
				ourLog.info("Query found {} matches in {}ms for query {}", myPidSet.size(), myStopwatch.getMillis(), mySearchUuid);
			}

		}

		@Override
@@ -241,8 +241,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
		Set<Long> includedPids = new HashSet<Long>();
		if (mySearchEntity.getSearchType() == SearchTypeEnum.SEARCH) {
			includedPids.addAll(sb.loadReverseIncludes(myDao, myContext, myEntityManager, pidsSubList, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated()));
			includedPids.addAll(sb.loadReverseIncludes(myDao, myContext, myEntityManager, pidsSubList, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated()));
		}
		includedPids.addAll(sb.loadReverseIncludes(myDao, myContext, myEntityManager, pidsSubList, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated()));

		// Execute the query and make sure we return distinct results
		List<IBaseResource> resources = new ArrayList<IBaseResource>();
@@ -0,0 +1,254 @@
package ca.uhn.fhir.jpa.provider.dstu3;

import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsInRelativeOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.Matchers.stringContainsInOrder;
import static org.junit.Assert.*;

import java.io.*;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.util.*;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.*;
import org.apache.http.entity.*;
import org.apache.http.message.BasicNameValuePair;
import org.hl7.fhir.dstu3.model.*;
import org.hl7.fhir.dstu3.model.Bundle.*;
import org.hl7.fhir.dstu3.model.Encounter.EncounterLocationComponent;
import org.hl7.fhir.dstu3.model.Encounter.EncounterStatus;
import org.hl7.fhir.dstu3.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.dstu3.model.Narrative.NarrativeStatus;
import org.hl7.fhir.dstu3.model.Observation.ObservationStatus;
import org.hl7.fhir.dstu3.model.Questionnaire.QuestionnaireItemType;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionStatus;
import org.hl7.fhir.instance.model.Encounter.EncounterState;
import org.hl7.fhir.instance.model.api.*;
import org.junit.*;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;

import com.google.common.base.Charsets;
import com.google.common.collect.Lists;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.SummaryEnum;
import ca.uhn.fhir.rest.client.IGenericClient;
import ca.uhn.fhir.rest.gclient.StringClientParam;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.util.TestUtil;
import ca.uhn.fhir.util.UrlUtil;

public class PatientEverythingDstu3Test extends BaseResourceProviderDstu3Test {

	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(PatientEverythingDstu3Test.class);
	private String orgId;
	private String patId;
	private String encId1;
	private String encId2;
	private ArrayList<String> myObsIds;
	private String myWrongPatId;
	private String myWrongEnc1;

	@Before
	public void beforeDisableResultReuse() {
		myDaoConfig.setReuseCachedSearchResultsForMillis(null);
	}

	@Override
	@After
	public void after() throws Exception {
		super.after();

		myDaoConfig.setReuseCachedSearchResultsForMillis(new DaoConfig().getReuseCachedSearchResultsForMillis());
		myDaoConfig.setEverythingIncludesFetchPageSize(new DaoConfig().getEverythingIncludesFetchPageSize());
	}

	@Override
	public void before() throws Exception {
		super.before();
		myFhirCtx.setParserErrorHandler(new StrictErrorHandler());

		myDaoConfig.setAllowMultipleDelete(true);

		Organization org = new Organization();
		org.setName("an org");
		orgId = ourClient.create().resource(org).execute().getId().toUnqualifiedVersionless().getValue();
		ourLog.info("OrgId: {}", orgId);

		Patient patient = new Patient();
		patient.getManagingOrganization().setReference(orgId);
		patId = ourClient.create().resource(patient).execute().getId().toUnqualifiedVersionless().getValue();

		Patient patient2 = new Patient();
		patient2.getManagingOrganization().setReference(orgId);
		myWrongPatId = ourClient.create().resource(patient2).execute().getId().toUnqualifiedVersionless().getValue();

		Encounter enc1 = new Encounter();
		enc1.setStatus(EncounterStatus.CANCELLED);
		enc1.getSubject().setReference(patId);
		enc1.getServiceProvider().setReference(orgId);
		encId1 = ourClient.create().resource(enc1).execute().getId().toUnqualifiedVersionless().getValue();

		Encounter enc2 = new Encounter();
		enc2.setStatus(EncounterStatus.ARRIVED);
		enc2.getSubject().setReference(patId);
		enc2.getServiceProvider().setReference(orgId);
		encId2 = ourClient.create().resource(enc2).execute().getId().toUnqualifiedVersionless().getValue();

		Encounter wrongEnc1 = new Encounter();
		wrongEnc1.setStatus(EncounterStatus.ARRIVED);
		wrongEnc1.getSubject().setReference(myWrongPatId);
		wrongEnc1.getServiceProvider().setReference(orgId);
		myWrongEnc1 = ourClient.create().resource(wrongEnc1).execute().getId().toUnqualifiedVersionless().getValue();

		myObsIds = new ArrayList<String>();
		for (int i = 0; i < 20; i++) {
			Observation obs = new Observation();
			obs.getSubject().setReference(patId);
			obs.setStatus(ObservationStatus.FINAL);
			String obsId = ourClient.create().resource(obs).execute().getId().toUnqualifiedVersionless().getValue();
			myObsIds.add(obsId);
		}

	}

	/**
	 * See #674
	 */
	@Test
	public void testEverythingReturnsCorrectResources() throws Exception {

		Bundle bundle = fetchBundle(ourServerBase + "/" + patId + "/$everything?_format=json&_count=100", EncodingEnum.JSON);

		assertNull(bundle.getLink("next"));

		Set<String> actual = new TreeSet<String>();
		for (BundleEntryComponent nextEntry : bundle.getEntry()) {
			actual.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
		}

		ourLog.info("Found IDs: {}", actual);

		assertThat(actual, hasItem(patId));
		assertThat(actual, hasItem(encId1));
		assertThat(actual, hasItem(encId2));
		assertThat(actual, hasItem(orgId));
		assertThat(actual, hasItems(myObsIds.toArray(new String[0])));
		assertThat(actual, not(hasItem(myWrongPatId)));
		assertThat(actual, not(hasItem(myWrongEnc1)));
	}

	/**
	 * See #674
	 */
	@Test
	public void testEverythingReturnsCorrectResourcesSmallPage() throws Exception {
		myDaoConfig.setEverythingIncludesFetchPageSize(1);

		Bundle bundle = fetchBundle(ourServerBase + "/" + patId + "/$everything?_format=json&_count=100", EncodingEnum.JSON);

		assertNull(bundle.getLink("next"));

		Set<String> actual = new TreeSet<String>();
		for (BundleEntryComponent nextEntry : bundle.getEntry()) {
			actual.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
		}

		ourLog.info("Found IDs: {}", actual);

		assertThat(actual, hasItem(patId));
		assertThat(actual, hasItem(encId1));
		assertThat(actual, hasItem(encId2));
		assertThat(actual, hasItem(orgId));
		assertThat(actual, hasItems(myObsIds.toArray(new String[0])));
		assertThat(actual, not(hasItem(myWrongPatId)));
		assertThat(actual, not(hasItem(myWrongEnc1)));
	}

	/**
	 * See #674
	 */
	@Test
	public void testEverythingPagesWithCorrectEncodingJson() throws Exception {

		Bundle bundle = fetchBundle(ourServerBase + "/" + patId + "/$everything?_format=json&_count=1", EncodingEnum.JSON);

		assertNotNull(bundle.getLink("next").getUrl());
		assertThat(bundle.getLink("next").getUrl(), containsString("_format=json"));
		bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.JSON);

		assertNotNull(bundle.getLink("next").getUrl());
		assertThat(bundle.getLink("next").getUrl(), containsString("_format=json"));
		bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.JSON);
	}

	/**
	 * See #674
	 */
	@Test
	public void testEverythingPagesWithCorrectEncodingXml() throws Exception {

		Bundle bundle = fetchBundle(ourServerBase + "/" + patId + "/$everything?_format=xml&_count=1", EncodingEnum.XML);

		assertNotNull(bundle.getLink("next").getUrl());
		ourLog.info("Next link: {}", bundle.getLink("next").getUrl());
		assertThat(bundle.getLink("next").getUrl(), containsString("_format=xml"));
		bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.XML);

		assertNotNull(bundle.getLink("next").getUrl());
		ourLog.info("Next link: {}", bundle.getLink("next").getUrl());
		assertThat(bundle.getLink("next").getUrl(), containsString("_format=xml"));
		bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.XML);
	}

	private Bundle fetchBundle(String theUrl, EncodingEnum theEncoding) throws IOException, ClientProtocolException {
		Bundle bundle;
		HttpGet get = new HttpGet(theUrl);
		CloseableHttpResponse resp = ourHttpClient.execute(get);
		try {
			assertEquals(theEncoding.getResourceContentTypeNonLegacy(), resp.getFirstHeader(Constants.HEADER_CONTENT_TYPE).getValue().replaceAll(";.*", ""));
			bundle = theEncoding.newParser(myFhirCtx).parseResource(Bundle.class, IOUtils.toString(resp.getEntity().getContent(), Charsets.UTF_8));
		} finally {
			IOUtils.closeQuietly(resp);
		}

		return bundle;
	}

	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
	}

}
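For reference, a client-side sketch of the operation these tests exercise, using the HAPI generic client instead of raw HTTP. The base URL and patient id are placeholders, and the snippet is illustrative rather than part of this change:

import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Parameters;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.IGenericClient;

public class EverythingClientSketch {

	public static void main(String[] args) {
		// Placeholder server base and patient id.
		FhirContext ctx = FhirContext.forDstu3();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/baseDstu3");

		// Invoke Patient/[id]/$everything and read the first page of results.
		Bundle firstPage = client
			.operation()
			.onInstance(new IdType("Patient", "123"))
			.named("$everything")
			.withNoParameters(Parameters.class)
			.returnResourceType(Bundle.class)
			.execute();

		// With this fix, each matching resource should appear exactly once
		// across all pages reached by following the Bundle's "next" links.
		System.out.println("Entries in first page: " + firstPage.getEntry().size());
	}
}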
@@ -47,6 +47,7 @@ import org.hl7.fhir.dstu3.model.Observation.ObservationStatus;
import org.hl7.fhir.dstu3.model.Questionnaire.QuestionnaireItemType;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionStatus;
import org.hl7.fhir.instance.model.Encounter.EncounterState;
import org.hl7.fhir.instance.model.api.*;
import org.junit.*;
import org.springframework.transaction.TransactionStatus;
pom.xml
@@ -1774,6 +1774,20 @@
			</plugins>
		</build>
	</profile>
	<profile>
		<id>MINPARALLEL</id>
		<build>
			<plugins>
				<plugin>
					<groupId>org.apache.maven.plugins</groupId>
					<artifactId>maven-surefire-plugin</artifactId>
					<configuration>
						<forkCount>2</forkCount>
					</configuration>
				</plugin>
			</plugins>
		</build>
	</profile>
	<profile>
		<id>ERRORPRONE</id>
		<build>
@@ -145,4 +145,26 @@
			</answer>
		</faq>
	</part>
	<part id="Contributing">
		<title>Contributing</title>
		<faq id="vm_quit_during_build">
			<question>
				My build is failing with the following error:
				<code>[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.19.1:test (default-test) on project hapi-fhir-jpaserver-base: Execution default-test of goal org.apache.maven.plugins:maven-surefire-plugin:2.19.1:test failed: The forked VM terminated without properly saying goodbye. VM crash or System.exit called?</code>
			</question>
			<answer>
				<p>
					This typically means that your build is running out of memory. HAPI's unit tests execute by
					default in multiple threads (the thread count is determined by the number of CPU cores available)
					so in an environment with lots of cores but not enough RAM, you may run out. If you are getting
					this error, try executing the build with the following arguments:
				</p>
				<pre>mvn -P ALLMODULES,NOPARALLEL install</pre>
				<p>
					See <a href="/hacking_hapi_fhir.html">Hacking HAPI FHIR</a> for more information on
					the build process.
				</p>
			</answer>
		</faq>
	</part>
</faqs>
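Related to the answer above (an editorial note, not part of the FAQ entry): this change set also adds a MINPARALLEL profile to pom.xml, shown in an earlier hunk, which caps Surefire at two forked test JVMs. That sits between the fully serial NOPARALLEL run and the default per-core parallelism, and it is invoked the same way, for example with "mvn -Dci=true -P ALLMODULES,MINPARALLEL clean install" as used in the .travis.yml change above.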