Add multithreaded migrator tool
commit 3f6960d82b
@@ -1,5 +1,25 @@
package ca.uhn.fhir.rest.api;

+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */

import java.util.HashMap;
import java.util.Map;
@@ -28,9 +28,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
- *
+ *
 * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -168,7 +168,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
verifySearchHasntFailedOrThrowInternalErrorException(search);
if (search.getStatus() == SearchStatusEnum.FINISHED) {
-ourLog.info("Search entity marked as finished");
+ourLog.info("Search entity marked as finished with {} results", search.getNumFound());
break;
}
if (search.getNumFound() >= theTo) {
@@ -189,7 +189,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
search = newSearch.get();
String resourceType = search.getResourceType();
SearchParameterMap params = search.getSearchParameterMap();
-SearchContinuationTask task = new SearchContinuationTask(search, myDaoRegistry.getResourceDao(resourceType), params, resourceType);
+IFhirResourceDao<?> resourceDao = myDaoRegistry.getResourceDao(resourceType);
+SearchContinuationTask task = new SearchContinuationTask(search, resourceDao, params, resourceType);
myIdToSearchTask.put(search.getUuid(), task);
myExecutor.submit(task);
}
@@ -228,10 +229,9 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
txTemplate.afterPropertiesSet();
return txTemplate.execute(t -> {
-Optional<Search> searchOpt = mySearchDao.findById(theSearch.getId());
-Search search = searchOpt.orElseThrow(IllegalStateException::new);
-if (search.getStatus() != SearchStatusEnum.PASSCMPLET) {
-throw new IllegalStateException("Can't change to LOADING because state is " + search.getStatus());
+myEntityManager.refresh(theSearch);
+if (theSearch.getStatus() != SearchStatusEnum.PASSCMPLET) {
+throw new IllegalStateException("Can't change to LOADING because state is " + theSearch.getStatus());
}
theSearch.setStatus(SearchStatusEnum.LOADING);
Search newSearch = mySearchDao.save(theSearch);
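A note on the hunk above: the search is re-checked and flipped to LOADING inside its own REQUIRES_NEW transaction. The sketch below is an illustrative reduction of that idiom, not the HAPI method itself; the class and parameter names are invented for the example.

    import org.springframework.transaction.PlatformTransactionManager;
    import org.springframework.transaction.TransactionDefinition;
    import org.springframework.transaction.support.TransactionTemplate;

    public class RequiresNewExample {
        // Runs the given work in its own transaction, suspending any caller
        // transaction, so a failure here cannot poison the surrounding unit of work.
        public static void runInNewTransaction(PlatformTransactionManager theTxManager, Runnable theWork) {
            TransactionTemplate txTemplate = new TransactionTemplate(theTxManager);
            txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
            txTemplate.afterPropertiesSet();
            txTemplate.execute(t -> {
                theWork.run();
                return null;
            });
        }
    }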
@@ -239,6 +239,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
});
} catch (Exception e) {
ourLog.warn("Failed to activate search: {}", e.toString());
+ourLog.trace("Failed to activate search", e);
return Optional.empty();
}
}
@@ -438,6 +439,11 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
myManagedTxManager = theTxManager;
}

+@VisibleForTesting
+public void setDaoRegistryForUnitTest(DaoRegistry theDaoRegistry) {
+myDaoRegistry = theDaoRegistry;
+}
+
public abstract class BaseTask implements Callable<Void> {
private final SearchParameterMap myParams;
private final IDao myCallingDao;
@@ -486,14 +492,41 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
}

public List<Long> getResourcePids(int theFromIndex, int theToIndex) {
-ourLog.info("Requesting search PIDs from {}-{}", theFromIndex, theToIndex);
+ourLog.debug("Requesting search PIDs from {}-{}", theFromIndex, theToIndex);

boolean keepWaiting;
do {
synchronized (mySyncedPids) {
-keepWaiting = mySyncedPids.size() < theToIndex && mySearch.getStatus() == SearchStatusEnum.LOADING;
+ourLog.trace("Search status is {}", mySearch.getStatus());
+boolean haveEnoughResults = mySyncedPids.size() >= theToIndex;
+if (!haveEnoughResults) {
+switch (mySearch.getStatus()) {
+case LOADING:
+keepWaiting = true;
+break;
+case PASSCMPLET:
+/*
+ * If we get here, it means that the user requested resources that crossed the
+ * current pre-fetch boundary. For example, if the prefetch threshold is 50 and the
+ * user has requested resources 0-60, then they would get 0-50 back but the search
+ * coordinator would then stop searching.
+ */
+List<Long> remainingResources = SearchCoordinatorSvcImpl.this.getResources(mySearch.getUuid(), mySyncedPids.size(), theToIndex);
+ourLog.debug("Adding {} resources to the existing {} synced resource IDs", remainingResources.size(), mySyncedPids.size());
+mySyncedPids.addAll(remainingResources);
+keepWaiting = false;
+break;
+case FAILED:
+case FINISHED:
+default:
+keepWaiting = false;
+break;
+}
+} else {
+keepWaiting = false;
+}
}

if (keepWaiting) {
ourLog.info("Waiting, as we only have {} results", mySyncedPids.size());
try {
@@ -808,6 +841,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
 * Construct the SQL query we'll be sending to the database
 */
IResultIterator theResultIterator = sb.createQuery(myParams, mySearch.getUuid());
+assert (theResultIterator != null);

/*
 * The following loop actually loads the PIDs of the resources
@@ -869,6 +903,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
txTemplate.afterPropertiesSet();
txTemplate.execute(t -> {
List<Long> previouslyAddedResourcePids = mySearchResultDao.findWithSearchUuid(getSearch());
+ourLog.debug("Have {} previously added IDs in search: {}", previouslyAddedResourcePids.size(), getSearch().getUuid());
setPreviouslyAddedResourcePids(previouslyAddedResourcePids);
return null;
});
@@ -17,13 +17,7 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.Matchers.stringContainsInOrder;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.Assert.*;

import java.io.BufferedReader;
import java.io.IOException;
@@ -41,6 +35,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
+import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
@@ -159,6 +154,50 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
}

+@Test
+public void testSearchLinksWorkWithIncludes() {
+for (int i = 0; i < 5; i++) {
+
+Organization o = new Organization();
+o.setId("O" + i);
+o.setName("O" + i);
+IIdType oid = ourClient.update().resource(o).execute().getId().toUnqualifiedVersionless();
+
+Patient p = new Patient();
+p.setId("P" + i);
+p.getManagingOrganization().setReference(oid.getValue());
+ourClient.update().resource(p).execute();
+
+}
+
+Bundle output = ourClient
+.search()
+.forResource("Patient")
+.include(IBaseResource.INCLUDE_ALL)
+.count(3)
+.returnBundle(Bundle.class)
+.execute();
+
+List<String> ids = output.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
+ourLog.info("Ids: {}", ids);
+assertEquals(6, output.getEntry().size());
+assertNotNull(output.getLink("next"));
+
+// Page 2
+output = ourClient
+.loadPage()
+.next(output)
+.execute();
+
+ids = output.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
+ourLog.info("Ids: {}", ids);
+assertEquals(4, output.getEntry().size());
+assertNull(output.getLink("next"));
+
+}
+
@Test
public void testDeleteConditional() {
@@ -1658,27 +1697,25 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.returnResourceType(Bundle.class)
.execute();

-TreeSet<String> ids = new TreeSet<>();
-for (int i = 0; i < responseBundle.getEntry().size(); i++) {
-BundleEntryComponent nextEntry = responseBundle.getEntry().get(i);
-ids.add(nextEntry.getResource().getIdElement().getIdPart());
-}
+ArrayList<String> ids = new ArrayList<>();
+for (BundleEntryComponent nextEntry : responseBundle.getEntry()) {
+ids.add(nextEntry.getResource().getIdElement().getIdPart());
+}

BundleLinkComponent nextLink = responseBundle.getLink("next");
-ourLog.info("Have {} IDs with next link: ", ids.size(), nextLink);
+ourLog.info("Have {} IDs with next link[{}] : {}", ids.size(), nextLink, ids);

while (nextLink != null) {
String nextUrl = nextLink.getUrl();
responseBundle = ourClient.fetchResourceFromUrl(Bundle.class, nextUrl);
-for (int i = 0; i < responseBundle.getEntry().size(); i++) {
-BundleEntryComponent nextEntry = responseBundle.getEntry().get(i);
-ids.add(nextEntry.getResource().getIdElement().getIdPart());
-}
+for (BundleEntryComponent nextEntry : responseBundle.getEntry()) {
+ids.add(nextEntry.getResource().getIdElement().getIdPart());
+}

nextLink = responseBundle.getLink("next");
-ourLog.info("Have {} IDs with next link: ", ids.size(), nextLink);
+ourLog.info("Have {} IDs with next link[{}] : {}", ids.size(), nextLink, ids);
}

assertThat(ids, hasItem(id.getIdPart()));
@@ -30,8 +30,11 @@ import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
import org.springframework.transaction.PlatformTransactionManager;
@@ -53,7 +56,7 @@ public class SearchCoordinatorSvcImplTest {
@Captor
ArgumentCaptor<Iterable<SearchResult>> mySearchResultIterCaptor;
@Mock
-private IDao myCallingDao;
+private IFhirResourceDao<?> myCallingDao;
@Mock
private EntityManager myEntityManager;
private int myExpectedNumberOfSearchBuildersCreated = 2;
@@ -70,6 +73,9 @@ public class SearchCoordinatorSvcImplTest {
@Mock
private PlatformTransactionManager myTxManager;
private DaoConfig myDaoConfig;
+private Search myCurrentSearch;
+@Mock
+private DaoRegistry myDaoRegistry;

@After
public void after() {
@@ -78,6 +84,7 @@ public class SearchCoordinatorSvcImplTest {

@Before
public void before() {
+myCurrentSearch = null;

mySvc = new SearchCoordinatorSvcImpl();
mySvc.setEntityManagerForUnitTest(myEntityManager);
@@ -86,6 +93,7 @@ public class SearchCoordinatorSvcImplTest {
mySvc.setSearchDaoForUnitTest(mySearchDao);
mySvc.setSearchDaoIncludeForUnitTest(mySearchIncludeDao);
mySvc.setSearchDaoResultForUnitTest(mySearchResultDao);
+mySvc.setDaoRegistryForUnitTest(myDaoRegistry);

myDaoConfig = new DaoConfig();
mySvc.setDaoConfigForUnitTest(myDaoConfig);
@@ -151,24 +159,43 @@ public class SearchCoordinatorSvcImplTest {
}

}

+private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSvcImplTest.class);
+
@Test
public void testAsyncSearchLargeResultSetBigCountSameCoordinator() {
SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));

List<Long> pids = createPidSequence(10, 800);
-IResultIterator iter = new SlowIterator(pids.iterator(), 1);
-when(mySearchBuider.createQuery(Mockito.same(params), any(String.class))).thenReturn(iter);
+SlowIterator iter = new SlowIterator(pids.iterator(), 1);
+when(mySearchBuider.createQuery(any(), any(String.class))).thenReturn(iter);
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

+when(mySearchResultDao.findWithSearchUuid(any(), any())).thenAnswer(t -> {
+List<Long> returnedValues = iter.getReturnedValues();
+Pageable page = (Pageable) t.getArguments()[1];
+int offset = (int) page.getOffset();
+int end = (int)(page.getOffset() + page.getPageSize());
+end = Math.min(end, returnedValues.size());
+offset = Math.min(offset, returnedValues.size());
+ourLog.info("findWithSearchUuid {} - {} out of {} values", offset, end, returnedValues.size());
+return new PageImpl<>(returnedValues.subList(offset, end));
+});
+
IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
assertNotNull(result.getUuid());
assertEquals(null, result.size());

List<IBaseResource> resources;

+when(mySearchDao.save(any())).thenAnswer(t -> {
+Search search = (Search) t.getArguments()[0];
+myCurrentSearch = search;
+return search;
+});
+when(mySearchDao.findByUuid(any())).thenAnswer(t -> myCurrentSearch);
+IFhirResourceDao dao = myCallingDao;
+when(myDaoRegistry.getResourceDao(any())).thenReturn(dao);
+
resources = result.getResources(0, 100000);
assertEquals(790, resources.size());
assertEquals("10", resources.get(0).getIdElement().getValueAsString());
@@ -178,7 +205,7 @@ public class SearchCoordinatorSvcImplTest {
verify(mySearchDao, atLeastOnce()).save(searchCaptor.capture());

verify(mySearchResultDao, atLeastOnce()).saveAll(mySearchResultIterCaptor.capture());
-List<SearchResult> allResults = new ArrayList<SearchResult>();
+List<SearchResult> allResults = new ArrayList<>();
for (Iterable<SearchResult> next : mySearchResultIterCaptor.getAllValues()) {
allResults.addAll(Lists.newArrayList(next));
}
@@ -186,6 +213,8 @@ public class SearchCoordinatorSvcImplTest {
assertEquals(790, allResults.size());
assertEquals(10, allResults.get(0).getResourcePid().longValue());
assertEquals(799, allResults.get(789).getResourcePid().longValue());
+
+myExpectedNumberOfSearchBuildersCreated = 3;
}

@Test
@@ -224,7 +253,7 @@ public class SearchCoordinatorSvcImplTest {
List<Long> pids = createPidSequence(10, 800);
IResultIterator iter = new SlowIterator(pids.iterator(), 2);
when(mySearchBuider.createQuery(Mockito.same(params), any(String.class))).thenReturn(iter);

when(mySearchDao.save(any())).thenAnswer(t -> t.getArguments()[0]);
doAnswer(loadPids()).when(mySearchBuider).loadResourcesByPid(any(List.class), any(List.class), any(Set.class), anyBoolean(), any(EntityManager.class), any(FhirContext.class), same(myCallingDao));

IBundleProvider result = mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective());
@@ -257,12 +286,6 @@ public class SearchCoordinatorSvcImplTest {
assertEquals("20", resources.get(0).getIdElement().getValueAsString());
assertEquals("29", resources.get(9).getIdElement().getValueAsString());

-provider = new PersistedJpaBundleProvider(result.getUuid(), myCallingDao);
-resources = provider.getResources(20, 99999);
-assertEquals(770, resources.size());
-assertEquals("30", resources.get(0).getIdElement().getValueAsString());
-assertEquals("799", resources.get(769).getIdElement().getValueAsString());
-
myExpectedNumberOfSearchBuildersCreated = 4;
}
@@ -452,11 +475,19 @@ public class SearchCoordinatorSvcImplTest {
}
}

+/**
+ * THIS CLASS IS FOR UNIT TESTS ONLY - It is deliberately inefficient
+ * and keeps things in memory.
+ * <p>
+ * Don't use it in real code!
+ */
public static class SlowIterator extends BaseIterator<Long> implements IResultIterator {

+private static final Logger ourLog = LoggerFactory.getLogger(SlowIterator.class);
private final IResultIterator myResultIteratorWrap;
private int myDelay;
private Iterator<Long> myWrap;
+private List<Long> myReturnedValues = new ArrayList<>();

public SlowIterator(Iterator<Long> theWrap, int theDelay) {
myWrap = theWrap;
@@ -470,9 +501,17 @@ public class SearchCoordinatorSvcImplTest {
myDelay = theDelay;
}

+public List<Long> getReturnedValues() {
+return myReturnedValues;
+}
+
@Override
public boolean hasNext() {
-return myWrap.hasNext();
+boolean retVal = myWrap.hasNext();
+if (!retVal) {
+ourLog.info("No more results remaining");
+}
+return retVal;
}

@Override
@@ -482,7 +521,9 @@ public class SearchCoordinatorSvcImplTest {
} catch (InterruptedException e) {
// ignore
}
-return myWrap.next();
+Long retVal = myWrap.next();
+myReturnedValues.add(retVal);
+return retVal;
}

@Override
@@ -31,6 +31,10 @@
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
</dependency>
+<dependency>
+<groupId>org.apache.commons</groupId>
+<artifactId>commons-dbcp2</artifactId>
+</dependency>

<!-- This dependency includes the core HAPI-FHIR classes -->
<dependency>
@@ -45,11 +49,6 @@
<artifactId>derby</artifactId>
<scope>test</scope>
</dependency>
-<dependency>
-<groupId>org.apache.commons</groupId>
-<artifactId>commons-dbcp2</artifactId>
-<scope>test</scope>
-</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.migrate;

import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -76,20 +77,13 @@ public enum DriverTypeEnum {
throw new InternalErrorException("Unable to find driver class: " + myDriverClassName, e);
}

-SingleConnectionDataSource dataSource = new SingleConnectionDataSource(){
-@Override
-protected Connection getConnectionFromDriver(Properties props) throws SQLException {
-Connection connect = driver.connect(theUrl, props);
-assert connect != null;
-return connect;
-}
-};
-dataSource.setAutoCommit(false);
+BasicDataSource dataSource = new BasicDataSource();
+// dataSource.setAutoCommit(false);
dataSource.setDriverClassName(myDriverClassName);
dataSource.setUrl(theUrl);
dataSource.setUsername(theUsername);
dataSource.setPassword(thePassword);
-dataSource.setSuppressClose(true);
+// dataSource.setSuppressClose(true);

DataSourceTransactionManager transactionManager = new DataSourceTransactionManager();
transactionManager.setDataSource(dataSource);
@@ -23,15 +23,21 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.collect.ForwardingMap;
import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.core.ColumnMapRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.RowCallbackHandler;

+import java.sql.ResultSet;
+import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.*;
import java.util.function.Function;
@@ -39,75 +45,147 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
private static final Logger ourLog = LoggerFactory.getLogger(CalculateHashesTask.class);
private int myBatchSize = 10000;
private Map<String, Function<MandatoryKeyMap<String, Object>, Long>> myCalculators = new HashMap<>();
+private ThreadPoolExecutor myExecutor;

public void setBatchSize(int theBatchSize) {
myBatchSize = theBatchSize;
}

/**
 * Constructor
 */
public CalculateHashesTask() {
super();
}

@Override
-public void execute() {
+public synchronized void execute() throws SQLException {
if (isDryRun()) {
return;
}

-List<Map<String, Object>> rows;
-do {
-rows = getTxTemplate().execute(t -> {
-JdbcTemplate jdbcTemplate = newJdbcTemnplate();
-jdbcTemplate.setMaxRows(myBatchSize);
-String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
-ourLog.info("Finding up to {} rows in {} that require hashes", myBatchSize, getTableName());
-return jdbcTemplate.queryForList(sql);
-});
-
-updateRows(rows);
-} while (rows.size() > 0);
-}
+initializeExecutor();
+try {
+while (true) {
+MyRowCallbackHandler rch = new MyRowCallbackHandler();
+getTxTemplate().execute(t -> {
+JdbcTemplate jdbcTemplate = newJdbcTemnplate();
+jdbcTemplate.setMaxRows(100000);
+String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
+ourLog.info("Finding up to {} rows in {} that require hashes", myBatchSize, getTableName());
+jdbcTemplate.query(sql, rch);
+rch.done();
+return null;
+});
+
+rch.submitNext();
+List<Future<?>> futures = rch.getFutures();
+if (futures.isEmpty()) {
+break;
+}
+
+ourLog.info("Waiting for {} tasks to complete", futures.size());
+for (Future<?> next : futures) {
+try {
+next.get();
+} catch (Exception e) {
+throw new SQLException(e);
+}
+}
+}
+} finally {
+destroyExecutor();
+}
+}
+
+private void destroyExecutor() {
+myExecutor.shutdownNow();
+}
+
+private void initializeExecutor() {
+int maximumPoolSize = Runtime.getRuntime().availableProcessors();
+
+LinkedBlockingQueue<Runnable> executorQueue = new LinkedBlockingQueue<>(maximumPoolSize);
+BasicThreadFactory threadFactory = new BasicThreadFactory.Builder()
+.namingPattern("worker-%d")
+.daemon(false)
+.priority(Thread.NORM_PRIORITY)
+.build();
+RejectedExecutionHandler rejectedExecutionHandler = new RejectedExecutionHandler() {
+@Override
+public void rejectedExecution(Runnable theRunnable, ThreadPoolExecutor theExecutor) {
+ourLog.info("Note: Executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size());
+StopWatch sw = new StopWatch();
+try {
+executorQueue.put(theRunnable);
+} catch (InterruptedException theE) {
+throw new RejectedExecutionException("Task " + theRunnable.toString() +
+" rejected from " + theE.toString());
+}
+ourLog.info("Slot became available after {}ms", sw.getMillis());
+}
+};
+myExecutor = new ThreadPoolExecutor(
+1,
+maximumPoolSize,
+0L,
+TimeUnit.MILLISECONDS,
+executorQueue,
+threadFactory,
+rejectedExecutionHandler);
+}

-private void updateRows(List<Map<String, Object>> theRows) {
+private Future<?> updateRows(List<Map<String, Object>> theRows) {
+Runnable task = () -> {
StopWatch sw = new StopWatch();
getTxTemplate().execute(t -> {

// Loop through rows
assert theRows != null;
for (Map<String, Object> nextRow : theRows) {

Map<String, Long> newValues = new HashMap<>();
MandatoryKeyMap<String, Object> nextRowMandatoryKeyMap = new MandatoryKeyMap<>(nextRow);

// Apply calculators
for (Map.Entry<String, Function<MandatoryKeyMap<String, Object>, Long>> nextCalculatorEntry : myCalculators.entrySet()) {
String nextColumn = nextCalculatorEntry.getKey();
Function<MandatoryKeyMap<String, Object>, Long> nextCalculator = nextCalculatorEntry.getValue();
Long value = nextCalculator.apply(nextRowMandatoryKeyMap);
newValues.put(nextColumn, value);
}

// Generate update SQL
StringBuilder sqlBuilder = new StringBuilder();
List<Long> arguments = new ArrayList<>();
sqlBuilder.append("UPDATE ");
sqlBuilder.append(getTableName());
sqlBuilder.append(" SET ");
for (Map.Entry<String, Long> nextNewValueEntry : newValues.entrySet()) {
if (arguments.size() > 0) {
sqlBuilder.append(", ");
}
sqlBuilder.append(nextNewValueEntry.getKey()).append(" = ?");
arguments.add(nextNewValueEntry.getValue());
}
sqlBuilder.append(" WHERE SP_ID = ?");
arguments.add((Long) nextRow.get("SP_ID"));

// Apply update SQL
newJdbcTemnplate().update(sqlBuilder.toString(), arguments.toArray());

}

return theRows.size();
});
ourLog.info("Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
+};
+return myExecutor.submit(task);
}
@@ -116,6 +194,39 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask
return this;
}

+private class MyRowCallbackHandler implements RowCallbackHandler {
+
+private List<Map<String, Object>> myRows = new ArrayList<>();
+private List<Future<?>> myFutures = new ArrayList<>();
+
+@Override
+public void processRow(ResultSet rs) throws SQLException {
+Map<String, Object> row = new ColumnMapRowMapper().mapRow(rs, 0);
+myRows.add(row);
+
+if (myRows.size() >= myBatchSize) {
+submitNext();
+}
+}
+
+private void submitNext() {
+if (myRows.size() > 0) {
+myFutures.add(updateRows(myRows));
+myRows = new ArrayList<>();
+}
+}
+
+public List<Future<?>> getFutures() {
+return myFutures;
+}
+
+public void done() {
+if (myRows.size() > 0) {
+submitNext();
+}
+}
+}
+
public static class MandatoryKeyMap<K, V> extends ForwardingMap<K, V> {
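Taken together, initializeExecutor() and MyRowCallbackHandler implement a bounded producer/consumer: row batches are submitted to a small thread pool, and when the pool's queue is full the rejection handler blocks the submitting thread instead of dropping work. The standalone sketch below reproduces that back-pressure pattern; all names in it are illustrative, not HAPI API.

    import java.util.concurrent.Executors;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.RejectedExecutionException;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class BlockingSubmitExample {
        public static void main(String[] args) throws InterruptedException {
            int poolSize = Runtime.getRuntime().availableProcessors();
            // Bounded queue: at most poolSize tasks may wait at once.
            LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<>(poolSize);
            ThreadPoolExecutor executor = new ThreadPoolExecutor(
                1, poolSize, 0L, TimeUnit.MILLISECONDS, queue,
                Executors.defaultThreadFactory(),
                (theRunnable, theExecutor) -> {
                    try {
                        // Block the producer until a queue slot frees up,
                        // rather than rejecting the task outright.
                        theExecutor.getQueue().put(theRunnable);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        throw new RejectedExecutionException(e);
                    }
                });
            for (int i = 0; i < 100; i++) {
                int batch = i;
                executor.submit(() -> System.out.println("processing batch " + batch));
            }
            executor.shutdown();
            executor.awaitTermination(1, TimeUnit.MINUTES);
        }
    }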
@@ -277,6 +277,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
Builder.BuilderWithTableName spp = version.onTable("HFJ_RES_PARAM_PRESENT");
version.startSectionWithMessage("Starting work on table: " + spp.getTableName());
spp.dropIndex("IDX_RESPARMPRESENT_SPID_RESID");
+spp.dropColumn("SP_ID");
spp
.addColumn("HASH_PRESENCE")
.nullable()
@@ -9,7 +9,7 @@ import java.util.Map;

import static org.junit.Assert.assertEquals;

-public class CreateHashesTest extends BaseTest {
+public class CalculateHashesTest extends BaseTest {

@Test
public void testCreateHashes() {
@@ -50,4 +50,36 @@ public class CreateHashesTest extends BaseTest {
});
}
+
+@Test
+public void testCreateHashesLargeNumber() {
+executeSql("create table HFJ_SPIDX_TOKEN (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_IDENTITY bigint, HASH_SYS bigint, HASH_SYS_AND_VALUE bigint, HASH_VALUE bigint, SP_SYSTEM varchar(200), SP_VALUE varchar(200), primary key (SP_ID))");
+
+for (int i = 0; i < 777; i++) {
+executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '8888888" + i + "', " + i + ")");
+}
+
+Long count = getConnectionProperties().getTxTemplate().execute(t -> {
+JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
+return jdbcTemplate.queryForObject("SELECT count(*) FROM HFJ_SPIDX_TOKEN WHERE HASH_VALUE IS NULL", Long.class);
+});
+assertEquals(777L, count.longValue());
+
+CalculateHashesTask task = new CalculateHashesTask();
+task.setTableName("HFJ_SPIDX_TOKEN");
+task.setColumnName("HASH_IDENTITY");
+task.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")));
+task.addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM")));
+task.addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE")));
+task.addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE")));
+task.setBatchSize(3);
+getMigrator().addTask(task);
+
+getMigrator().migrate();
+
+count = getConnectionProperties().getTxTemplate().execute(t -> {
+JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
+return jdbcTemplate.queryForObject("SELECT count(*) FROM HFJ_SPIDX_TOKEN WHERE HASH_VALUE IS NULL", Long.class);
+});
+assertEquals(0L, count.longValue());
+}
+}
@@ -215,16 +215,24 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
linkPrev = RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, theResult.getPreviousPageId(), theRequest.getParameters(), prettyPrint, theBundleType);
}
} else if (searchId != null) {
-int offset = theOffset + resourceList.size();
-
-// We're doing offset pages
-if (numTotalResults == null || offset < numTotalResults) {
-linkNext = (RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, offset, numToReturn, theRequest.getParameters(), prettyPrint, theBundleType));
+if (numTotalResults == null || theOffset + numToReturn < numTotalResults) {
+linkNext = (RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, theOffset + numToReturn, numToReturn, theRequest.getParameters(), prettyPrint, theBundleType));
}
if (theOffset > 0) {
int start = Math.max(0, theOffset - theLimit);
linkPrev = RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, start, theLimit, theRequest.getParameters(), prettyPrint, theBundleType);
}
+// int offset = theOffset + resourceList.size();
+//
+// // We're doing offset pages
+// if (numTotalResults == null || offset < numTotalResults) {
+// linkNext = (RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, offset, numToReturn, theRequest.getParameters(), prettyPrint, theBundleType));
+// }
+// if (theOffset > 0) {
+// int start = Math.max(0, theOffset - theLimit);
+// linkPrev = RestfulServerUtils.createPagingLink(theIncludes, serverBase, searchId, start, theLimit, theRequest.getParameters(), prettyPrint, theBundleType);
+// }
}

bundleFactory.addRootPropertiesToBundle(theResult.getUuid(), serverBase, theLinkSelf, linkPrev, linkNext, theResult.size(), theBundleType, theResult.getPublished());
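The paging fix above matters when _include resources ride along in a bundle: the number of entries returned can exceed the page size, so the next-link offset must advance by the requested page size, not by the entry count. A small worked example of the arithmetic, with values invented to mirror the testSearchLinksWorkWithIncludes test:

    public class OffsetPagingExample {
        public static void main(String[] args) {
            int pageSize = 3;        // _count requested by the client
            int matches = 3;         // primary matches on this page
            int includes = 3;        // _include resources added to the bundle
            int entriesReturned = matches + includes; // 6 entries in the bundle

            int offset = 0;
            int buggyNext = offset + entriesReturned; // 6: would skip matches 3..5
            int fixedNext = offset + pageSize;        // 3: what the new code computes
            System.out.println("buggy=" + buggyNext + " fixed=" + fixedNext);
        }
    }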
pom.xml
@@ -1727,6 +1727,12 @@
<copy todir="target/site/apidocs-jpaserver">
<fileset dir="hapi-fhir-jpaserver-base/target/site/apidocs"/>
</copy>
+<copy todir="target/site/apidocs-client">
+<fileset dir="hapi-fhir-client/target/site/apidocs"/>
+</copy>
+<copy todir="target/site/apidocs-server">
+<fileset dir="hapi-fhir-server/target/site/apidocs"/>
+</copy>
<copy todir="target/site/xref-jpaserver">
<fileset dir="hapi-fhir-jpaserver-base/target/site/xref"/>
</copy>
@@ -2131,6 +2137,8 @@
<module>hapi-fhir-structures-dstu2</module>
<module>hapi-fhir-structures-dstu3</module>
<module>hapi-fhir-structures-r4</module>
+<module>hapi-fhir-client</module>
+<module>hapi-fhir-server</module>
<module>hapi-fhir-jpaserver-base</module>
<module>hapi-fhir-jaxrsserver-base</module>
<!-- <module>hapi-fhir-cobertura</module> -->
@@ -142,6 +142,10 @@
to use a parameter annotated with @ResourceParam to receive the Parameters
(or other) resource supplied by the client as the request body.
</action>
+<action type="add">
+The JPA server version migrator tool now runs in a multithreaded way, allowing it to
+upgrade the database faster when migration tasks require data updates.
+</action>
</release>

<release version="3.5.0" date="2018-09-17">
@@ -115,6 +115,8 @@
<item name="Model API (DSTU2)" href="./apidocs-dstu2/index.html" />
<item name="Model API (DSTU3)" href="./apidocs-dstu3/index.html" />
<item name="Model API (R4)" href="./apidocs-r4/index.html" />
+<item name="Client API" href="./apidocs-client/index.html" />
+<item name="Server API" href="./apidocs-server/index.html" />
<item name="JPA Server API" href="./apidocs-jpaserver/index.html" />
</item>
<item name="Command Line Tool (hapi-fhir-cli)" href="./doc_cli.html" />
@@ -1,86 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<document xmlns="http://maven.apache.org/XDOC/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/XDOC/2.0 http://maven.apache.org/xsd/xdoc-2.0.xsd">

<properties>
<title>Documentation</title>
<author email="jamesagnew@users.sourceforge.net">James Agnew</author>
</properties>

<body>

<section name="Documentation">

<p>
Welcome to HAPI FHIR! We hope that the documentation here will be
helpful to you.
</p>

<ul>
<li><a href="./doc_intro.html">Introduction</a></li>
<li><a href="./doc_dstu2.html">Note on DSTU2 Support / FHIR Versions</a></li>
</ul>

<h4>The Data Model</h4>
<ul>
<li><a href="./doc_fhirobjects.html">Working with Resources</a></li>
<li><a href="./doc_extensions.html">Profiles & Extensions</a></li>
<li><a href="./doc_resource_references.html">Resource References</a></li>
<li><a href="./doc_tags.html">Tags</a></li>
<li><a href="./doc_validation.html">Validation</a></li>
<li><a href="./doc_custom_structures.html">Custom Structures</a></li>
<li><a href="./doc_converter.html">Version Converter</a></li>
</ul>

<h4>RESTful Client</h4>
<ul>
<li><a href="./doc_rest_client.html">Fluent/Generic Client</a></li>
<li><a href="./doc_rest_client_annotation.html">Annotation Client</a></li>
<li><a href="./doc_rest_client_interceptor.html">Interceptors (client)</a></li>
<li><a href="./doc_rest_client_http_config.html">Client Configuration</a></li>
<li><a href="./doc_rest_client_examples.html">Client Examples</a></li>
<li><a href="./doc_rest_client_alternate_provider.html">JAX-RS Client & Alternate HTTP Providers</a></li>
</ul>

<h4>RESTful Server</h4>
<ul>
<li><a href="./doc_rest_server.html">Using RESTful Server</a></li>
<li><a href="./doc_rest_operations.html">RESTful Operations</a></li>
<li><a href="./doc_narrative.html">Narrative Generator</a></li>
<li><a href="./doc_rest_server_interceptor.html">Interceptors (server)</a></li>
<li><a href="./doc_rest_server_security.html">Security</a></li>
<li><a href="./doc_cors.html">CORS Support</a></li>
<li><a href="./doc_server_tester.html">Web Testing UI</a></li>
<li><a href="./doc_rest_server_jaxrs.html">JAX-RS Support</a></li>
</ul>

<h4>Other Features</h4>

<ul>
<li><a href="./doc_logging.html">Logging</a></li>
<li><a href="./doc_rest_etag.html">ETags</a></li>
<li><a href="./doc_jpa.html">JPA/Database Server</a></li>
<li><a href="./doc_tinder.html">Maven Plugin (hapi-tinder-plugin)</a></li>
<li><a href="./doc_cli.html">Command Line Tool (hapi-fhir-cli)</a></li>

</ul>

<h4>JavaDocs</h4>
<ul>
<li><a href="./apidocs/index.html">Core API</a></li>
<li><a href="./apidocs-dstu/index.html">Model API (DSTU1)</a></li>
<li><a href="./apidocs-dstu2/index.html" >Model API (DSTU2)</a></li>
<li><a href="./apidocs-dstu3/index.html" >Model API (STU3)</a></li>
+<li><a href="./apidocs-client/index.html" >Client API</a></li>
+<li><a href="./apidocs-server/index.html" >Server API</a></li>
<li><a href="./apidocs-jpaserver/index.html">JPA Server API</a></li>
</ul>

<h4>Source Cross Reference</h4>
<ul>
<li><a href="./xref-base/index.html">Core</a></li>
<li><a href="./xref-jpaserver/index.html">JPA Server</a></li>
</ul>

</section>

</body>

</document>