diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java
index d10613d01a3..1a0d781e921 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/SearchTotalModeEnum.java
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.rest.api;
 
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2018 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import java.util.HashMap;
 import java.util.Map;
 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParameterMap.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParameterMap.java
index f9c38feeed7..111656582d0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParameterMap.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchParameterMap.java
@@ -28,9 +28,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/hapi-fhir-jpaserver-migrate/pom.xml b/hapi-fhir-jpaserver-migrate/pom.xml
index a804fc80a9d..2618d55c8c7 100644
--- a/hapi-fhir-jpaserver-migrate/pom.xml
+++ b/hapi-fhir-jpaserver-migrate/pom.xml
@@ -31,6 +31,10 @@
 		<dependency>
 			<groupId>org.springframework</groupId>
 			<artifactId>spring-jdbc</artifactId>
 		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-dbcp2</artifactId>
+		</dependency>
 
 
@@ -45,11 +49,6 @@
 			<artifactId>derby</artifactId>
 			<scope>test</scope>
 		</dependency>
-		<dependency>
-			<groupId>org.apache.commons</groupId>
-			<artifactId>commons-dbcp2</artifactId>
-			<scope>test</scope>
-		</dependency>
 		<dependency>
 			<groupId>junit</groupId>
 			<artifactId>junit</artifactId>
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
index 8e5cba77e1d..c3d8a8725ba 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/DriverTypeEnum.java
@@ -1,6 +1,7 @@
 package ca.uhn.fhir.jpa.migrate;
 
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.commons.lang3.Validate;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -76,20 +77,13 @@ public enum DriverTypeEnum {
 			throw new InternalErrorException("Unable to find driver class: " + myDriverClassName, e);
 		}
 
-		SingleConnectionDataSource dataSource = new SingleConnectionDataSource(){
-			@Override
-			protected Connection getConnectionFromDriver(Properties props) throws SQLException {
-				Connection connect = driver.connect(theUrl, props);
-				assert connect != null;
-				return connect;
-			}
-		};
-		dataSource.setAutoCommit(false);
+		BasicDataSource dataSource = new BasicDataSource();
+//		dataSource.setAutoCommit(false);
 		dataSource.setDriverClassName(myDriverClassName);
 		dataSource.setUrl(theUrl);
 		dataSource.setUsername(theUsername);
 		dataSource.setPassword(thePassword);
-		dataSource.setSuppressClose(true);
+//		dataSource.setSuppressClose(true);
 
 		DataSourceTransactionManager transactionManager = new DataSourceTransactionManager();
 		transactionManager.setDataSource(dataSource);
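The DriverTypeEnum change above swaps Spring's SingleConnectionDataSource for a commons-dbcp2 BasicDataSource, which is what allows several migration worker threads to hold database connections at once. As a rough sketch of the same wiring outside of HAPI (the driver class, URL, credentials, and pool size below are illustrative assumptions, not values from this patch), a pooled data source for a migration run could look like this:

import org.apache.commons.dbcp2.BasicDataSource;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

public class MigrationDataSourceSketch {

	// Hypothetical helper, not part of HAPI FHIR: builds a pooled data source sized to the worker count.
	public static DataSourceTransactionManager newTransactionManager() {
		BasicDataSource dataSource = new BasicDataSource();
		dataSource.setDriverClassName("org.apache.derby.jdbc.EmbeddedDriver");
		dataSource.setUrl("jdbc:derby:memory:migration;create=true");
		dataSource.setUsername("SA");
		dataSource.setPassword("SA");
		// One pooled connection per worker so concurrent UPDATE batches do not contend for a single connection.
		dataSource.setMaxTotal(Runtime.getRuntime().availableProcessors());

		DataSourceTransactionManager transactionManager = new DataSourceTransactionManager();
		transactionManager.setDataSource(dataSource);
		return transactionManager;
	}
}

The commented-out setAutoCommit and setSuppressClose calls in the hunk above are settings from the old SingleConnectionDataSource API that were commented out rather than ported to the new data source.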
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java
index 620792a4754..0300e92fa32 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java
@@ -23,15 +23,21 @@ package ca.uhn.fhir.jpa.migrate.taskdef;
 
 import ca.uhn.fhir.util.StopWatch;
 import com.google.common.collect.ForwardingMap;
 import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.checkerframework.checker.nullness.compatqual.NullableDecl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.core.ColumnMapRowMapper;
 import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.RowCallbackHandler;
 
+import java.sql.ResultSet;
+import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.*;
 import java.util.function.Function;
 
@@ -39,75 +45,147 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask> {
 	private static final Logger ourLog = LoggerFactory.getLogger(CalculateHashesTask.class);
 	private int myBatchSize = 10000;
 	private Map<String, Function<MandatoryKeyMap<String, Object>, Long>> myCalculators = new HashMap<>();
+	private ThreadPoolExecutor myExecutor;
 
 	public void setBatchSize(int theBatchSize) {
 		myBatchSize = theBatchSize;
 	}
 
+	/**
+	 * Constructor
+	 */
+	public CalculateHashesTask() {
+		super();
+	}
+
 	@Override
-	public void execute() {
+	public synchronized void execute() throws SQLException {
 		if (isDryRun()) {
 			return;
 		}
 
-		List<Map<String, Object>> rows;
-		do {
-			rows = getTxTemplate().execute(t -> {
-				JdbcTemplate jdbcTemplate = newJdbcTemnplate();
-				jdbcTemplate.setMaxRows(myBatchSize);
-				String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
-				ourLog.info("Finding up to {} rows in {} that requires hashes", myBatchSize, getTableName());
-				return jdbcTemplate.queryForList(sql);
-			});
+		initializeExecutor();
+		try {
 
-			updateRows(rows);
-		} while (rows.size() > 0);
-	}
+			while(true) {
+				MyRowCallbackHandler rch = new MyRowCallbackHandler();
+				getTxTemplate().execute(t -> {
+					JdbcTemplate jdbcTemplate = newJdbcTemnplate();
+					jdbcTemplate.setMaxRows(100000);
+					String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
+					ourLog.info("Finding up to {} rows in {} that requires hashes", myBatchSize, getTableName());
 
-	private void updateRows(List<Map<String, Object>> theRows) {
-		StopWatch sw = new StopWatch();
-		getTxTemplate().execute(t -> {
+					jdbcTemplate.query(sql, rch);
+					rch.done();
 
-			// Loop through rows
-			assert theRows != null;
-			for (Map<String, Object> nextRow : theRows) {
+					return null;
+				});
 
-				Map<String, Long> newValues = new HashMap<>();
-				MandatoryKeyMap<String, Object> nextRowMandatoryKeyMap = new MandatoryKeyMap<>(nextRow);
-
-				// Apply calculators
-				for (Map.Entry<String, Function<MandatoryKeyMap<String, Object>, Long>> nextCalculatorEntry : myCalculators.entrySet()) {
-					String nextColumn = nextCalculatorEntry.getKey();
-					Function<MandatoryKeyMap<String, Object>, Long> nextCalculator = nextCalculatorEntry.getValue();
-					Long value = nextCalculator.apply(nextRowMandatoryKeyMap);
-					newValues.put(nextColumn, value);
+				rch.submitNext();
+				List<Future<?>> futures = rch.getFutures();
+				if (futures.isEmpty()) {
+					break;
 				}
 
-				// Generate update SQL
-				StringBuilder sqlBuilder = new StringBuilder();
-				List<Long> arguments = new ArrayList<>();
-				sqlBuilder.append("UPDATE ");
-				sqlBuilder.append(getTableName());
-				sqlBuilder.append(" SET ");
-				for (Map.Entry<String, Long> nextNewValueEntry : newValues.entrySet()) {
-					if (arguments.size() > 0) {
-						sqlBuilder.append(", ");
+				ourLog.info("Waiting for {} tasks to complete", futures.size());
+				for (Future<?> next : futures) {
+					try {
+						next.get();
+					} catch (Exception e) {
+						throw new SQLException(e);
 					}
-					sqlBuilder.append(nextNewValueEntry.getKey()).append(" = ?");
-					arguments.add(nextNewValueEntry.getValue());
 				}
-				sqlBuilder.append(" WHERE SP_ID = ?");
-				arguments.add((Long) nextRow.get("SP_ID"));
-
-				// Apply update SQL
-				newJdbcTemnplate().update(sqlBuilder.toString(), arguments.toArray());
 			}
 
-			return theRows.size();
-		});
-		ourLog.info("Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
+		} finally {
+			destroyExecutor();
+		}
+	}
+
+	private void destroyExecutor() {
+		myExecutor.shutdownNow();
+	}
+
+	private void initializeExecutor() {
+		int maximumPoolSize = Runtime.getRuntime().availableProcessors();
+
+		LinkedBlockingQueue<Runnable> executorQueue = new LinkedBlockingQueue<>(maximumPoolSize);
+		BasicThreadFactory threadFactory = new BasicThreadFactory.Builder()
+			.namingPattern("worker-" + "-%d")
+			.daemon(false)
+			.priority(Thread.NORM_PRIORITY)
+			.build();
+		RejectedExecutionHandler rejectedExecutionHandler = new RejectedExecutionHandler() {
+			@Override
+			public void rejectedExecution(Runnable theRunnable, ThreadPoolExecutor theExecutor) {
+				ourLog.info("Note: Executor queue is full ({} elements), waiting for a slot to become available!", executorQueue.size());
+				StopWatch sw = new StopWatch();
+				try {
+					executorQueue.put(theRunnable);
+				} catch (InterruptedException theE) {
+					throw new RejectedExecutionException("Task " + theRunnable.toString() +
+						" rejected from " + theE.toString());
+				}
+				ourLog.info("Slot become available after {}ms", sw.getMillis());
+			}
+		};
+		myExecutor = new ThreadPoolExecutor(
+			1,
+			maximumPoolSize,
+			0L,
+			TimeUnit.MILLISECONDS,
+			executorQueue,
+			threadFactory,
+			rejectedExecutionHandler);
+	}
+
+	private Future<?> updateRows(List<Map<String, Object>> theRows) {
+		Runnable task = () -> {
+			StopWatch sw = new StopWatch();
+			getTxTemplate().execute(t -> {
+
+				// Loop through rows
+				assert theRows != null;
+				for (Map<String, Object> nextRow : theRows) {
+
+					Map<String, Long> newValues = new HashMap<>();
+					MandatoryKeyMap<String, Object> nextRowMandatoryKeyMap = new MandatoryKeyMap<>(nextRow);
+
+					// Apply calculators
+					for (Map.Entry<String, Function<MandatoryKeyMap<String, Object>, Long>> nextCalculatorEntry : myCalculators.entrySet()) {
+						String nextColumn = nextCalculatorEntry.getKey();
+						Function<MandatoryKeyMap<String, Object>, Long> nextCalculator = nextCalculatorEntry.getValue();
+						Long value = nextCalculator.apply(nextRowMandatoryKeyMap);
+						newValues.put(nextColumn, value);
+					}
+
+					// Generate update SQL
+					StringBuilder sqlBuilder = new StringBuilder();
+					List<Long> arguments = new ArrayList<>();
+					sqlBuilder.append("UPDATE ");
+					sqlBuilder.append(getTableName());
+					sqlBuilder.append(" SET ");
+					for (Map.Entry<String, Long> nextNewValueEntry : newValues.entrySet()) {
+						if (arguments.size() > 0) {
+							sqlBuilder.append(", ");
+						}
+						sqlBuilder.append(nextNewValueEntry.getKey()).append(" = ?");
+						arguments.add(nextNewValueEntry.getValue());
+					}
+					sqlBuilder.append(" WHERE SP_ID = ?");
+					arguments.add((Long) nextRow.get("SP_ID"));
+
+					// Apply update SQL
+					newJdbcTemnplate().update(sqlBuilder.toString(), arguments.toArray());
+
+				}
+
+				return theRows.size();
+			});
+			ourLog.info("Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
+		};
+		return myExecutor.submit(task);
 	}
 
 	public CalculateHashesTask addCalculator(String theColumnName, Function<MandatoryKeyMap<String, Object>, Long> theConsumer) {
@@ -116,6 +194,39 @@ public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask> {
 		myCalculators.put(theColumnName, theConsumer);
 		return this;
 	}
 
+	private class MyRowCallbackHandler implements RowCallbackHandler {
+
+		private List<Map<String, Object>> myRows = new ArrayList<>();
+		private List<Future<?>> myFutures = new ArrayList<>();
+
+		@Override
+		public void processRow(ResultSet rs) throws SQLException {
+			Map<String, Object> row = new ColumnMapRowMapper().mapRow(rs, 0);
+			myRows.add(row);
+
+			if (myRows.size() >= myBatchSize) {
+				submitNext();
+			}
+		}
+
+		private void submitNext() {
+			if (myRows.size() > 0) {
+				myFutures.add(updateRows(myRows));
+				myRows = new ArrayList<>();
+			}
+		}
+
+		public List<Future<?>> getFutures() {
+			return myFutures;
+		}
+
+		public void done() {
+			if (myRows.size() > 0) {
+				submitNext();
+			}
+		}
+	}
+
 	public static class MandatoryKeyMap<K, V> extends ForwardingMap<K, V> {
 
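The executor set up in CalculateHashesTask above pairs a bounded LinkedBlockingQueue with a RejectedExecutionHandler that re-enqueues the task using a blocking put(), so the thread reading rows is throttled whenever every worker is busy. The standalone sketch below shows the same back-pressure pattern using only JDK classes; the pool size, batch count, and printed output are illustrative assumptions rather than anything taken from the patch:

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BlockingBackpressureExecutorSketch {

	public static void main(String[] args) throws InterruptedException {
		int workers = Runtime.getRuntime().availableProcessors();
		// Bounded queue: once it fills up, the rejection handler below blocks the submitting thread.
		LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<>(workers);

		RejectedExecutionHandler blockUntilFree = (theRunnable, theExecutor) -> {
			try {
				// Same idea as the patch: park the caller until a queue slot opens up.
				queue.put(theRunnable);
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
				throw new RejectedExecutionException(e);
			}
		};

		ThreadPoolExecutor executor = new ThreadPoolExecutor(
			1, workers, 0L, TimeUnit.MILLISECONDS, queue, blockUntilFree);

		// Submit far more tasks than the queue can hold; submission throttles itself automatically.
		for (int i = 0; i < 100; i++) {
			int batchNumber = i;
			executor.execute(() -> System.out.println("processing batch " + batchNumber));
		}

		executor.shutdown();
		executor.awaitTermination(1, TimeUnit.MINUTES);
	}
}

One caveat about this pattern, in the sketch and in the patch alike: a task re-queued by the handler after shutdown has begun may never run, so it only behaves well while the executor is still accepting work.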
diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTest.java
similarity index 56%
rename from hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java
rename to hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTest.java
index a5140a72b83..6e15f8734be 100644
--- a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java
+++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTest.java
@@ -9,7 +9,7 @@ import java.util.Map;
 
 import static org.junit.Assert.assertEquals;
 
-public class CreateHashesTest extends BaseTest {
+public class CalculateHashesTest extends BaseTest {
 
 	@Test
 	public void testCreateHashes() {
@@ -50,4 +50,36 @@ public class CreateHashesTest extends BaseTest {
 		});
 	}
 
+	@Test
+	public void testCreateHashesLargeNumber() {
+		executeSql("create table HFJ_SPIDX_TOKEN (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_IDENTITY bigint, HASH_SYS bigint, HASH_SYS_AND_VALUE bigint, HASH_VALUE bigint, SP_SYSTEM varchar(200), SP_VALUE varchar(200), primary key (SP_ID))");
+
+		for (int i = 0; i < 777; i++) {
+			executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '8888888" + i + "', " + i + ")");
+		}
+
+		Long count = getConnectionProperties().getTxTemplate().execute(t -> {
+			JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
+			return jdbcTemplate.queryForObject("SELECT count(*) FROM HFJ_SPIDX_TOKEN WHERE HASH_VALUE IS NULL", Long.class);
+		});
+		assertEquals(777L, count.longValue());
+
+		CalculateHashesTask task = new CalculateHashesTask();
+		task.setTableName("HFJ_SPIDX_TOKEN");
+		task.setColumnName("HASH_IDENTITY");
+		task.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME")));
+		task.addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM")));
+		task.addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE")));
+		task.addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE")));
+		task.setBatchSize(3);
+		getMigrator().addTask(task);
+
+		getMigrator().migrate();
+
+		count = getConnectionProperties().getTxTemplate().execute(t -> {
+			JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
+			return jdbcTemplate.queryForObject("SELECT count(*) FROM HFJ_SPIDX_TOKEN WHERE HASH_VALUE IS NULL", Long.class);
+		});
+		assertEquals(0L, count.longValue());
+	}
 }
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java
index 5f9f0e809c0..2bb73bee73d 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/method/SearchTotalModeParameter.java
@@ -18,9 +18,9 @@ import java.util.Collection;
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/pom.xml b/pom.xml
index 50cc673a3af..d5ff60b05fb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1727,6 +1727,12 @@
+
+
+
+
+
+
@@ -2131,6 +2137,8 @@
 				<module>hapi-fhir-structures-dstu2</module>
 				<module>hapi-fhir-structures-dstu3</module>
 				<module>hapi-fhir-structures-r4</module>
+				<module>hapi-fhir-client</module>
+				<module>hapi-fhir-server</module>
 				<module>hapi-fhir-jpaserver-base</module>
 				<module>hapi-fhir-jaxrsserver-base</module>
diff --git a/src/changes/changes.xml b/src/changes/changes.xml
index e06eb146ba0..f71138cf5b9 100644
--- a/src/changes/changes.xml
+++ b/src/changes/changes.xml
@@ -100,6 +100,10 @@
 			permission is granted. This has been corrected so that transaction()
 			allows both batch and transaction requests to proceed.
 		</action>
+		<action type="add">
+			The JPA server version migrator tool now runs in a multithreaded way, allowing it to
+			upgrade the database faster when migration tasks require data updates.
+		</action>
diff --git a/src/site/site.xml b/src/site/site.xml
index 2c5ca40def0..d0e75165cc7 100644
--- a/src/site/site.xml
+++ b/src/site/site.xml
@@ -115,6 +115,8 @@
+
+
diff --git a/src/site/xdoc/docindex.xml b/src/site/xdoc/docindex.xml
index 49cefb6481c..226d7e8b457 100644
--- a/src/site/xdoc/docindex.xml
+++ b/src/site/xdoc/docindex.xml
@@ -1,86 +1,88 @@
-
-	Documentation
-	James Agnew
-
-	Welcome to HAPI FHIR! We hope that the documentation here will be
-	helpful to you.
-
-	Introduction
-	Note on DSTU2 Support / FHIR Versions
-
-	The Data Model
-
-	Working with Resources
-	Profiles & Extensions
-	Resource References
-	Tags
-	Validation
-	Custom Structures
-	Version Converter
-
-	RESTful Client
-
-	Fluent/Generic Client
-	Annotation Client
-	Interceptors (client)
-	Client Configuration
-	Client Examples
-	JAX-RS Client & Alternate HTTP Providers
-
-	RESTful Server
-
-	Using RESTful Server
-	RESTful Operations
-	Narrative Generator
-	Interceptors (server)
-	Security
-	CORS Support
-	Web Testing UI
-	JAX-RS Support
-
-	Other Features
-
-	Logging
-	ETags
-	JPA/Database Server
-	Maven Plugin (hapi-tinder-plugin)
-	Command Line Tool (hapi-fhir-cli)
-
-	JavaDocs
-
-	Core API
-	Model API (DSTU1)
-	Model API (DSTU2)
-	Model API (STU3)
-	JPA Server API
-
-	Source Cross Reference
-
-	Core
-	JPA Server
-
+
+	Documentation
+	James Agnew
+
+	Welcome to HAPI FHIR! We hope that the documentation here will be
+	helpful to you.
+
+	Introduction
+	Note on DSTU2 Support / FHIR Versions
+
+	The Data Model
+
+	Working with Resources
+	Profiles & Extensions
+	Resource References
+	Tags
+	Validation
+	Custom Structures
+	Version Converter
+
+	RESTful Client
+
+	Fluent/Generic Client
+	Annotation Client
+	Interceptors (client)
+	Client Configuration
+	Client Examples
+	JAX-RS Client & Alternate HTTP Providers
+
+	RESTful Server
+
+	Using RESTful Server
+	RESTful Operations
+	Narrative Generator
+	Interceptors (server)
+	Security
+	CORS Support
+	Web Testing UI
+	JAX-RS Support
+
+	Other Features
+
+	Logging
+	ETags
+	JPA/Database Server
+	Maven Plugin (hapi-tinder-plugin)
+	Command Line Tool (hapi-fhir-cli)
+
+	JavaDocs
+
+	Core API
+	Model API (DSTU1)
+	Model API (DSTU2)
+	Model API (STU3)
+	Client API
+	Server API
+	JPA Server API
+
+	Source Cross Reference
+
+	Core
+	JPA Server
+
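The changelog entry above is the user-facing summary of this change: hash back-filling now fans row updates out across worker threads instead of processing one batch at a time. Ignoring HAPI's actual classes, the flow it describes — slice pending rows into batches, hand each batch to a pool, then wait for every batch before finishing the migration step — reduces to roughly the following sketch (the row count, batch size, and updateBatch placeholder are assumptions for illustration):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class BatchedUpdateSketch {

	public static void main(String[] args) throws Exception {
		// Pretend these are primary keys of rows whose hash columns are still NULL.
		List<Long> pendingIds = new ArrayList<>();
		for (long i = 0; i < 777; i++) {
			pendingIds.add(i);
		}

		int batchSize = 100;
		ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
		List<Future<?>> futures = new ArrayList<>();

		// Slice the pending rows into batches and hand each batch to a worker thread.
		for (int start = 0; start < pendingIds.size(); start += batchSize) {
			List<Long> batch = pendingIds.subList(start, Math.min(start + batchSize, pendingIds.size()));
			futures.add(executor.submit(() -> updateBatch(batch)));
		}

		// Wait for every batch before declaring the migration step finished.
		for (Future<?> next : futures) {
			next.get();
		}
		executor.shutdown();
	}

	private static void updateBatch(List<Long> theIds) {
		// Placeholder for the real work: compute the hash columns and issue one UPDATE per row.
		System.out.println("updated " + theIds.size() + " rows");
	}
}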