Merge remote-tracking branch 'origin/master' into issue-2445-support-patient-level-export
Commit e09741cbf6
@@ -608,6 +608,11 @@ public class DaoConfig {
 	 * <code>_sort</code> parameter on searches): If the server is configured
 	 * to not index missing field.
 	 * </p>
+	 * <p>
+	 * The following index may need to be added into the indexed tables such as <code>HFJ_SPIDX_TOKEN</code>
+	 * to improve the search performance while <code>:missing</code> is enabled.
+	 * <code>RES_TYPE, SP_NAME, SP_MISSING</code>
+	 * </p>
 	 */
 	public void setIndexMissingFields(IndexEnabledEnum theIndexMissingFields) {
 		Validate.notNull(theIndexMissingFields, "theIndexMissingFields must not be null");
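For orientation, a minimal sketch of turning missing-field indexing on through the setter shown in this hunk. The import path and the IndexEnabledEnum.ENABLED constant are assumptions based on the HAPI FHIR JPA API, not part of this diff.

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class MissingFieldsSetup {
	// Sketch: configure a DaoConfig so that missing fields are indexed.
	// With this enabled on a large database, the index suggested in the Javadoc above
	// (RES_TYPE, SP_NAME, SP_MISSING on tables such as HFJ_SPIDX_TOKEN) helps
	// keep :missing searches fast.
	public DaoConfig daoConfig() {
		DaoConfig daoConfig = new DaoConfig();
		daoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
		return daoConfig;
	}
}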
@@ -151,6 +151,7 @@ import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean;
 import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
 
 import javax.annotation.Nullable;
+import javax.annotation.PostConstruct;
 import java.util.Date;
 
 /*
@@ -200,6 +201,29 @@ public abstract class BaseConfig {
 	@Autowired
 	private DaoRegistry myDaoRegistry;
 
+	/**
+	 * Subclasses may override this method to provide settings such as search coordinator pool sizes.
+	 */
+	@PostConstruct
+	public void initSettings() {}
+
+	private Integer searchCoordCorePoolSize = 20;
+	private Integer searchCoordMaxPoolSize = 100;
+	private Integer searchCoordQueueCapacity = 200;
+
+	public void setSearchCoordCorePoolSize(Integer searchCoordCorePoolSize) {
+		this.searchCoordCorePoolSize = searchCoordCorePoolSize;
+	}
+
+	public void setSearchCoordMaxPoolSize(Integer searchCoordMaxPoolSize) {
+		this.searchCoordMaxPoolSize = searchCoordMaxPoolSize;
+	}
+
+	public void setSearchCoordQueueCapacity(Integer searchCoordQueueCapacity) {
+		this.searchCoordQueueCapacity = searchCoordQueueCapacity;
+	}
+
+
 	@Bean
 	public BatchConfigurer batchConfigurer() {
 		return new NonPersistedBatchConfigurer();
@@ -316,6 +340,9 @@ public abstract class BaseConfig {
 	public ThreadPoolTaskExecutor searchCoordinatorThreadFactory() {
 		final ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
 		threadPoolTaskExecutor.setThreadNamePrefix("search_coord_");
+		threadPoolTaskExecutor.setCorePoolSize(searchCoordCorePoolSize);
+		threadPoolTaskExecutor.setMaxPoolSize(searchCoordMaxPoolSize);
+		threadPoolTaskExecutor.setQueueCapacity(searchCoordQueueCapacity);
 		threadPoolTaskExecutor.initialize();
 		return threadPoolTaskExecutor;
 	}
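As a usage note for the new hook, a hedged sketch of a subclass overriding initSettings() to change the pool values read by searchCoordinatorThreadFactory(). The class name MyServerConfig, the import path, and the chosen numbers are hypothetical, and the sketch assumes Spring dispatches the parent's @PostConstruct callback to the override.

import ca.uhn.fhir.jpa.config.BaseConfig;

// Declared abstract only so the sketch stands on its own without restating
// BaseConfig's other abstract members; a real subclass would supply those too.
public abstract class MyServerConfig extends BaseConfig {

	// Sketch only: raise the search coordinator pool from the diff defaults (20/100/200).
	@Override
	public void initSettings() {
		setSearchCoordCorePoolSize(40);
		setSearchCoordMaxPoolSize(150);
		setSearchCoordQueueCapacity(500);
	}
}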
@@ -71,9 +71,23 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		init501(); // 20200514 - 20200515
 		init510(); // 20200516 - 20201028
 		init520(); // 20201029 -
 		init530();
+		init540(); // 20210218 -
 	}
 
+	private void init540() {
+
+		Builder version = forVersion(VersionEnum.V5_4_0);
+
+		//-- add index on HFJ_SPIDX_DATE
+		version.onTable("HFJ_SPIDX_DATE").addIndex("20210309.1", "IDX_SP_DATE_HASH_HIGH")
+			.unique(false).withColumns("HASH_IDENTITY", "SP_VALUE_HIGH");
+
+		//-- add index on HFJ_FORCED_ID
+		version.onTable("HFJ_FORCED_ID").addIndex("20210309.2", "IDX_FORCEID_FID")
+			.unique(false).withColumns("FORCED_ID");
+	}
+
 	private void init530() {
 		Builder version = forVersion(VersionEnum.V5_3_0);
 
@@ -126,6 +140,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
 		// HFJ_RES_LINK
 		version.onTable("HFJ_RES_LINK")
 			.addColumn("20210126.1", "TARGET_RESOURCE_VERSION").nullable().type(ColumnTypeEnum.LONG);
+
 	}
 
 	protected void init520() {
@@ -31,6 +31,7 @@ import javax.persistence.ForeignKey;
 import javax.persistence.GeneratedValue;
 import javax.persistence.GenerationType;
 import javax.persistence.Id;
+import javax.persistence.Index;
 import javax.persistence.JoinColumn;
 import javax.persistence.OneToOne;
 import javax.persistence.SequenceGenerator;
@@ -48,6 +49,7 @@ import javax.persistence.UniqueConstraint;
 	 * - IDX_FORCEDID_TYPE_RESID
 	 * so don't reuse these names
 	 */
+	@Index(name = "IDX_FORCEID_FID", columnList = "FORCED_ID")
 })
 public class ForcedId extends BasePartitionable {
 
@@ -55,6 +55,7 @@ import java.util.Date;
 	// We previously had an index called IDX_SP_DATE - Dont reuse
 	@Index(name = "IDX_SP_DATE_HASH", columnList = "HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH"),
 	@Index(name = "IDX_SP_DATE_HASH_LOW", columnList = "HASH_IDENTITY,SP_VALUE_LOW"),
+	@Index(name = "IDX_SP_DATE_HASH_HIGH", columnList = "HASH_IDENTITY,SP_VALUE_HIGH"),
 	@Index(name = "IDX_SP_DATE_ORD_HASH", columnList = "HASH_IDENTITY,SP_VALUE_LOW_DATE_ORDINAL,SP_VALUE_HIGH_DATE_ORDINAL"),
 	@Index(name = "IDX_SP_DATE_ORD_HASH_LOW", columnList = "HASH_IDENTITY,SP_VALUE_LOW_DATE_ORDINAL"),
 	@Index(name = "IDX_SP_DATE_RESID", columnList = "RES_ID"),