Mb new token search index (#3386)

* New token search param indexing.
* New token query test cases for SQL extraction.
* Retire ugly JPA FK names.
michaelabuckley 2022-03-10 19:51:53 -05:00 committed by GitHub
parent dfd99c5471
commit 80d1a5a6f8
13 changed files with 528 additions and 217 deletions
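
In outline, the commit replaces each narrow single-hash index on HFJ_SPIDX_TOKEN with a wider covering index that also carries RES_ID and PARTITION_ID, so token searches and sorts can be answered from the index alone. The sketch below only mirrors the Builder DSL that appears in the HapiFhirJpaMigrationTasks diff further down; it assumes the surrounding init600() context, and the "NNNNNNNN.n" task IDs and chosen index are placeholders, not additional migrations.

// Sketch only - mirrors the migration builder calls visible in the diff below.
Builder.BuilderWithTableName tokenTable = version.onTable("HFJ_SPIDX_TOKEN");

// 1. Add the replacement index online so readers and writers are not blocked.
tokenTable
	.addIndex("NNNNNNNN.1", "IDX_SP_TOKEN_HASH_SV_V2")
	.unique(false)
	.online(true)
	// RES_ID and PARTITION_ID let the index cover the join back to HFJ_RESOURCE
	.withColumns("HASH_SYS_AND_VALUE", "RES_ID", "PARTITION_ID");

// 2. Drop the narrow index it replaces.
tokenTable.dropIndexOnline("NNNNNNNN.2", "IDX_SP_TOKEN_HASH_SV");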

View File

@@ -285,7 +285,7 @@ public class TestUtil {
Validate.notNull(fk);
Validate.isTrue(isNotBlank(fk.name()), "Foreign key on " + theAnnotatedElement + " has no name()");
List<String> legacySPHibernateFKNames = Arrays.asList(
-"FK7ULX3J1GG3V7MAQREJGC7YBC4", "FKC97MPK37OKWU8QVTCEG2NH9VN", "FKCLTIHNC5TGPRJ9BHPT7XI5OTB", "FKGXSREUTYMMFJUWDSWV3Y887DO");
"FKC97MPK37OKWU8QVTCEG2NH9VN", "FKCLTIHNC5TGPRJ9BHPT7XI5OTB", "FKGXSREUTYMMFJUWDSWV3Y887DO");
if (legacySPHibernateFKNames.contains(fk.name())) {
// wipmb temporarily allow the hibernate legacy sp fk names
} else {

View File

@@ -87,17 +87,21 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
private void init600() {
Builder version = forVersion(VersionEnum.V6_0_0);
-/*
/**
* New indexing for the core SPIDX tables.
* Ensure all queries can be satisfied by the index directly,
* either as left or right table in a hash or sort join.
*
* new date search indexing
* @see ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder
* @see ResourceIndexedSearchParamDate
*/
-// new date search indexes
{
Builder.BuilderWithTableName dateTable = version.onTable("HFJ_SPIDX_DATE");
// replace and drop IDX_SP_DATE_HASH
dateTable
-.addIndex("20220207.1", "IDX_SP_DATE_HASH_V2" )
.addIndex("20220207.1", "IDX_SP_DATE_HASH_V2")
.unique(false)
.online(true)
.withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH", "RES_ID", "PARTITION_ID");
@@ -108,7 +112,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// replace and drop IDX_SP_DATE_HASH_HIGH
dateTable
-.addIndex("20220207.4", "IDX_SP_DATE_HASH_HIGH_V2" )
.addIndex("20220207.4", "IDX_SP_DATE_HASH_HIGH_V2")
.unique(false)
.online(true)
.withColumns("HASH_IDENTITY", "SP_VALUE_HIGH", "RES_ID", "PARTITION_ID");
@@ -116,7 +120,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// replace and drop IDX_SP_DATE_ORD_HASH
dateTable
-.addIndex("20220207.6", "IDX_SP_DATE_ORD_HASH_V2" )
.addIndex("20220207.6", "IDX_SP_DATE_ORD_HASH_V2")
.unique(false)
.online(true)
.withColumns("HASH_IDENTITY", "SP_VALUE_LOW_DATE_ORDINAL", "SP_VALUE_HIGH_DATE_ORDINAL", "RES_ID", "PARTITION_ID");
@@ -124,7 +128,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// replace and drop IDX_SP_DATE_ORD_HASH_HIGH
dateTable
-.addIndex("20220207.8", "IDX_SP_DATE_ORD_HASH_HIGH_V2" )
.addIndex("20220207.8", "IDX_SP_DATE_ORD_HASH_HIGH_V2")
.unique(false)
.online(true)
.withColumns("HASH_IDENTITY", "SP_VALUE_HIGH_DATE_ORDINAL", "RES_ID", "PARTITION_ID");
@@ -135,7 +139,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// replace and drop IDX_SP_DATE_RESID
dateTable
-.addIndex("20220207.11", "IDX_SP_DATE_RESID_V2" )
.addIndex("20220207.11", "IDX_SP_DATE_RESID_V2")
.unique(false)
.online(true)
.withColumns("RES_ID", "HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH", "SP_VALUE_LOW_DATE_ORDINAL", "SP_VALUE_HIGH_DATE_ORDINAL", "PARTITION_ID");
@@ -151,6 +155,70 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
// drop obsolete
dateTable.dropIndexOnline("20220207.16", "IDX_SP_DATE_UPDATED");
}
/**
* new token search indexing
* @see ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder
* @see ResourceIndexedSearchParamToken
*/
{
Builder.BuilderWithTableName tokenTable = version.onTable("HFJ_SPIDX_TOKEN");
// replace and drop IDX_SP_TOKEN_HASH for sorting
tokenTable
.addIndex("20220208.1", "IDX_SP_TOKEN_HASH_V2")
.unique(false).online(true)
.withColumns("HASH_IDENTITY", "SP_SYSTEM", "SP_VALUE", "RES_ID", "PARTITION_ID");
tokenTable.dropIndexOnline("20220208.2", "IDX_SP_TOKEN_HASH");
// for search by system
tokenTable
.addIndex("20220208.3", "IDX_SP_TOKEN_HASH_S_V2")
.unique(false).online(true)
.withColumns("HASH_SYS", "RES_ID", "PARTITION_ID");
tokenTable.dropIndexOnline("20220208.4", "IDX_SP_TOKEN_HASH_S");
// for search by system+value
tokenTable
.addIndex("20220208.5", "IDX_SP_TOKEN_HASH_SV_V2")
.unique(false).online(true)
.withColumns("HASH_SYS_AND_VALUE", "RES_ID", "PARTITION_ID");
tokenTable.dropIndexOnline("20220208.6", "IDX_SP_TOKEN_HASH_SV");
// for search by value
tokenTable
.addIndex("20220208.7", "IDX_SP_TOKEN_HASH_V_V2")
.unique(false).online(true)
.withColumns("HASH_VALUE", "RES_ID", "PARTITION_ID");
tokenTable.dropIndexOnline("20220208.8", "IDX_SP_TOKEN_HASH_V");
// obsolete. We're dropping this column.
tokenTable.dropIndexOnline("20220208.9", "IDX_SP_TOKEN_UPDATED");
// for joining as second table:
{
// replace and drop IDX_SP_TOKEN_RESID, and the associated fk constraint
tokenTable
.addIndex("20220208.10", "IDX_SP_TOKEN_RESID_V2")
.unique(false).online(true)
.withColumns("RES_ID", "HASH_SYS_AND_VALUE", "HASH_VALUE", "HASH_SYS", "HASH_IDENTITY", "PARTITION_ID");
// some engines tie the FK constraint to a particular index.
// So we need to drop and recreate the constraint to drop the old RES_ID index.
// Rename it while we're at it. FK7ULX3J1GG3V7MAQREJGC7YBC4 was not a pretty name.
tokenTable.dropForeignKey("20220208.11", "FK7ULX3J1GG3V7MAQREJGC7YBC4", "HFJ_RESOURCE");
tokenTable.dropIndexOnline("20220208.12", "IDX_SP_TOKEN_RESID");
tokenTable.dropIndexOnline("20220208.13", "FK7ULX3J1GG3V7MAQREJGC7YBC4");
tokenTable.addForeignKey("20220208.14", "FK_SP_TOKEN_RES")
.toColumn("RES_ID").references("HFJ_RESOURCE", "RES_ID");
}
}
// fix for https://github.com/hapifhir/hapi-fhir/issues/3316
// index must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
@@ -201,6 +269,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
batchChunk.addColumn("ERROR_COUNT").nonNullable().type(ColumnTypeEnum.INT);
batchChunk.addIndex("20220227.4", "IDX_BT2WC_II_SEQ").unique(false).withColumns("INSTANCE_ID", "SEQ");
batchChunk.addForeignKey("20220227.5", "FK_BT2WC_INSTANCE").toColumn("INSTANCE_ID").references("BT2_JOB_INSTANCE", "ID");
}
/**
@@ -1298,19 +1367,23 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
spidxToken
.addIndex("20180903.35", "IDX_SP_TOKEN_HASH")
.unique(false)
-.withColumns("HASH_IDENTITY");
.withColumns("HASH_IDENTITY")
.doNothing();
spidxToken
.addIndex("20180903.36", "IDX_SP_TOKEN_HASH_S")
.unique(false)
-.withColumns("HASH_SYS");
.withColumns("HASH_SYS")
.doNothing();
spidxToken
.addIndex("20180903.37", "IDX_SP_TOKEN_HASH_SV")
.unique(false)
-.withColumns("HASH_SYS_AND_VALUE");
.withColumns("HASH_SYS_AND_VALUE")
.doNothing();
spidxToken
.addIndex("20180903.38", "IDX_SP_TOKEN_HASH_V")
.unique(false)
-.withColumns("HASH_VALUE");
.withColumns("HASH_VALUE")
.doNothing();
spidxToken
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.39")
.setColumnName("HASH_IDENTITY")

View File

@@ -5329,7 +5329,7 @@ public class FhirResourceDaoR4LegacySearchBuilderTest extends BaseJpaR4Test {
}
@Override
-protected Fixture getFixture() {
protected Fixture constructFixture() {
return new TestDataBuilderFixture(FhirResourceDaoR4LegacySearchBuilderTest.this, myObservationDao);
}
}

View File

@@ -1,58 +0,0 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.config.TestHibernateSearchAddInConfig;
import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.BaseDateSearchDaoTests;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.DaoTestDataBuilder;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.transaction.PlatformTransactionManager;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestR4Config.class, TestHibernateSearchAddInConfig.NoFT.class})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
public class FhirResourceDaoR4LuceneDisabledStandardQueries extends BaseJpaTest {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4LuceneDisabledStandardQueries.class);
@Autowired
PlatformTransactionManager myTxManager;
@Autowired
FhirContext myFhirCtx;
@Autowired
@Qualifier("myObservationDaoR4")
IFhirResourceDao<Observation> myObservationDao;
@Autowired
protected DaoRegistry myDaoRegistry;
@Override
protected PlatformTransactionManager getTxManager() {
return myTxManager;
}
@Override
protected FhirContext getFhirContext() {
return myFhirCtx;
}
@Nested
public class DateSearchTests extends BaseDateSearchDaoTests {
@Override
protected Fixture getFixture() {
DaoTestDataBuilder testDataBuilder = new DaoTestDataBuilder(myFhirCtx, myDaoRegistry, new SystemRequestDetails());
return new TestDataBuilderFixture<>(testDataBuilder, myObservationDao);
}
}
}

View File

@@ -756,11 +756,10 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
public class DateSearchIT extends BaseDateSearchDaoTests {
@Override
-protected Fixture getFixture() {
protected Fixture constructFixture() {
DaoTestDataBuilder testDataBuilder = new DaoTestDataBuilder(myFhirCtx, myDaoRegistry, new SystemRequestDetails());
return new TestDataBuilderFixture<>(testDataBuilder, myObservationDao);
}
}
}

View File

@@ -0,0 +1,252 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.config.TestHibernateSearchAddInConfig;
import ca.uhn.fhir.jpa.config.TestR4Config;
import ca.uhn.fhir.jpa.dao.BaseDateSearchDaoTests;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.DaoTestDataBuilder;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceSearch;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.server.method.SortParameter;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import org.apache.commons.lang3.tuple.ImmutableTriple;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.web.util.UriComponents;
import org.springframework.web.util.UriComponentsBuilder;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.not;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestR4Config.class, TestHibernateSearchAddInConfig.NoFT.class})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
public class FhirResourceDaoR4StandardQueriesNoFTTest extends BaseJpaTest {
private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4StandardQueriesNoFTTest.class);
@Autowired
PlatformTransactionManager myTxManager;
@Autowired
FhirContext myFhirCtx;
@Autowired
@Qualifier("myObservationDaoR4")
IFhirResourceDao<Observation> myObservationDao;
@Autowired
protected DaoRegistry myDaoRegistry;
@Autowired
MatchUrlService myMatchUrlService;
@Override
protected PlatformTransactionManager getTxManager() {
return myTxManager;
}
@Override
protected FhirContext getFhirContext() {
return myFhirCtx;
}
@Nested
public class DateSearchTests extends BaseDateSearchDaoTests {
@Override
protected Fixture constructFixture() {
DaoTestDataBuilder testDataBuilder = new DaoTestDataBuilder(myFhirCtx, myDaoRegistry, new SystemRequestDetails());
return new TestDataBuilderFixture<>(testDataBuilder, myObservationDao);
}
}
public static class TokenTestCase {
private Consumer<IBaseResource>[] myBuilders;
private List<ImmutableTriple<Boolean, String, String>> mySearchCases = new ArrayList<>();
public static TokenTestCase onObservation(Consumer<IBaseResource>... theBuilders) {
TokenTestCase result = new TokenTestCase();
result.myBuilders = theBuilders;
return result;
}
public TokenTestCase finds(String theMessage, String theQuery) {
mySearchCases.add(new ImmutableTriple(true,theMessage, theQuery));
return this;
}
public TokenTestCase doesNotFind(String theMessage, String theQuery) {
mySearchCases.add(new ImmutableTriple(false,theMessage, theQuery));
return this;
}
}
@Nested
public class TokenSearch {
// wipmb make this generic and share with ES, and Mongo.
/*
String criteria = "_has:Condition:subject:code=http://snomed.info/sct|55822003,http://snomed.info/sct|55822005&" +
"_has:Condition:asserter:code=http://snomed.info/sct|55822003,http://snomed.info/sct|55822004";
*/
ITestDataBuilder myDataBuilder = new DaoTestDataBuilder(myFhirCtx, myDaoRegistry, new SystemRequestDetails());
Set<IIdType> myCreatedIds = new HashSet<>();
@AfterEach
public void cleanup() {
ourLog.info("cleanup {}", myCreatedIds);
myCreatedIds.forEach(myObservationDao::delete);
}
@Nested
public class Queries {
IIdType myObservationId;
@Test
public void systemAndCode() {
withObservation(myDataBuilder.withObservationCode("http://example.com", "value"));
assertFind("by system and code", "/Observation?code=http://example.com|value");
assertFind("by system, any code", "/Observation?code=http://example.com|");
assertFind("by code, any system", "/Observation?code=value");
assertNotFind("by same system, different code", "/Observation?code=http://example.com|other");
assertNotFind("by same code, different system", "/Observation?code=http://example2.com|value");
assertNotFind("by different code, different system", "/Observation?code=http://example2.com|otherValue");
}
@Test
public void emptySystem() {
withObservation(myDataBuilder.withObservationCode("", "value"));
assertFind("by system and code", "/Observation?code=|value");
assertFind("by system, any code", "/Observation?code=|");
assertFind("by code, any system", "/Observation?code=value");
}
@Nested
public class NotModifier {
@Test
public void simple() {
withObservation(myDataBuilder.withObservationCode("http://example.com", "value"));
assertFind("by same system, different code", "/Observation?code:not=http://example.com|other");
assertFind("by same code, different system", "/Observation?code:not=http://example2.com|value");
assertFind("by different code, different system", "/Observation?code:not=http://example2.com|otherValue");
assertNotFind("by system and code", "/Observation?code:not=http://example.com|value");
assertNotFind("by system, any code", "/Observation?code:not=http://example.com|");
assertNotFind("by code, any system", "/Observation?code:not=value");
}
@Test
public void findsEmpty() {
withObservation();
assertFind("by system and code", "/Observation?code:not=http://example.com|value");
assertFind("by system, any code", "/Observation?code:not=http://example.com|");
assertFind("by code, any system", "/Observation?code:not=value");
}
}
@Nested
public class TextModifier {
@Test
public void systemAndCode() {
withObservation(myDataBuilder.withObservationCode("http://example.com", "value", "the display text"));
assertFind("by code display", "/Observation?code:text=the%20display%20text");
}
}
@Nested
public class Sorting {
@Test
public void sortBySystemThenValue() {
String idAlphaM = withObservation(myDataBuilder.withObservationCode("http://alpha.org", "Mvalue")).getIdPart();
String idAlphaA = withObservation(myDataBuilder.withObservationCode("http://alpha.org", "Avalue")).getIdPart();
String idAlphaZ = withObservation(myDataBuilder.withObservationCode("http://alpha.org", "Zvalue")).getIdPart();
String idExD = withObservation(myDataBuilder.withObservationCode("http://example.org", "DValue")).getIdPart();
String idExA = withObservation(myDataBuilder.withObservationCode("http://example.org", "AValue")).getIdPart();
String idExM = withObservation(myDataBuilder.withObservationCode("http://example.org", "MValue")).getIdPart();
List<String> allIds = searchForIds("/Observation?_sort=code");
assertThat(allIds, hasItems(idAlphaA, idAlphaM, idAlphaZ, idExA, idExD, idExM));
allIds = searchForIds("/Observation?_sort=code&code=http://example.org|");
assertThat(allIds, hasItems(idExA, idExD, idExM));
}
}
private IIdType withObservation(Consumer<IBaseResource>... theBuilder) {
myObservationId = myDataBuilder.createObservation(theBuilder);
myCreatedIds.add(myObservationId);
return myObservationId;
}
private void assertFind(String theMessage, String theUrl) {
List<String> resourceIds = searchForIds(theUrl);
assertThat(theMessage, resourceIds, hasItem(equalTo(myObservationId.getIdPart())));
}
private void assertNotFind(String theMessage, String theUrl) {
List<String> resourceIds = searchForIds(theUrl);
assertThat(theMessage, resourceIds, not(hasItem(equalTo(myObservationId.getIdPart()))));
}
}
private List<String> searchForIds(String theQueryUrl) {
// fake out the server url parsing
ResourceSearch search = myMatchUrlService.getResourceSearch(theQueryUrl);
SearchParameterMap map = search.getSearchParameterMap();
map.setLoadSynchronous(true);
SystemRequestDetails request = fakeRequestDetailsFromUrl(theQueryUrl);
SortSpec sort = (SortSpec) new SortParameter(myFhirCtx).translateQueryParametersIntoServerArgument(request, null);
if (sort != null) {
map.setSort(sort);
}
IBundleProvider result = myObservationDao.search(map);
List<String> resourceIds = result.getAllResourceIds();
return resourceIds;
}
}
@Nonnull
private SystemRequestDetails fakeRequestDetailsFromUrl(String theQueryUrl) {
SystemRequestDetails request = new SystemRequestDetails();
UriComponents uriComponents = UriComponentsBuilder.fromUriString(theQueryUrl).build();
uriComponents.getQueryParams().entrySet().forEach(nextEntry -> {
request.addParameter(nextEntry.getKey(), nextEntry.getValue().toArray(new String[0]));
});
return request;
}
}

View File

@@ -70,6 +70,7 @@ import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;
@@ -101,6 +102,9 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
@BeforeEach
public void disableAdvanceIndexing() {
myDaoConfig.setAdvancedLuceneIndexing(false);
// ugh - somewhere the hibernate round trip is mangling LocalDate to h2 date column unless the tz=GMT
TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
ourLog.info("Running with Timezone {}", TimeZone.getDefault().getID());
}
@Test

View File

@@ -53,89 +53,88 @@ class TokenAutocompleteAggregationTest {
@Nested
public class ResultExtraction {
// Sample result from elastic for Observation.code
-String resultJson = """
String resultJson = "{ " +
" \"doc_count\": 22770, " +
" \"search\": { " +
" \"doc_count\": 4, " +
" \"group_by_token\": { " +
" \"doc_count_error_upper_bound\": 0, " +
" \"sum_other_doc_count\": 0, " +
" \"buckets\": [ " +
" { " +
" \"key\": \"http://loinc.org|59460-6\", " +
" \"doc_count\": 2, " +
" \"top_tags_hits\": { " +
" \"hits\": { " +
" \"total\": { " +
" \"value\": 2, " +
" \"relation\": \"eq\" " +
" }, " +
" \"max_score\": 4.9845064e-05, " +
" \"hits\": [ " +
" { " +
" \"_index\": \"resourcetable-000001\", " +
" \"_type\": \"_doc\", " +
" \"_id\": \"1405280\", " +
" \"_nested\": { " +
" \"field\": \"nsp.code\", " +
" \"offset\": 0 " +
" }, " +
" \"_score\": 4.9845064e-05, " +
" \"_source\": { " +
" \"string\": { " +
" \"text\": \"Fall risk total [Morse Fall Scale]\" " +
" }, " +
" \"token\": { " +
" \"code\": \"59460-6\", " +
" \"system\": \"http://loinc.org\", " +
" \"code-system\": \"http://loinc.org|59460-6\" " +
" } " +
" } " +
" } " +
" ] " +
" } " +
" } " +
" }, " +
" { " +
" \"key\": \"http://loinc.org|59461-4\", " +
" \"doc_count\": 2, " +
" \"top_tags_hits\": { " +
" \"hits\": { " +
" \"total\": { " +
" \"value\": 2, " +
" \"relation\": \"eq\" " +
" }, " +
" \"max_score\": 4.9845064e-05, " +
" \"hits\": [ " +
" { " +
" \"_index\": \"resourcetable-000001\", " +
" \"_type\": \"_doc\", " +
" \"_id\": \"1405281\", " +
" \"_nested\": { " +
" \"field\": \"nsp.code\", " +
" \"offset\": 0 " +
" }, " +
" \"_score\": 4.9845064e-05, " +
" \"_source\": { " +
" \"string\": { " +
" \"text\": \"Fall risk level [Morse Fall Scale]\" " +
" }, " +
" \"token\": { " +
" \"code\": \"59461-4\", " +
" \"system\": \"http://loinc.org\", " +
" \"code-system\": \"http://loinc.org|59461-4\" " +
" } " +
" } " +
" } " +
" ] " +
" } " +
" } " +
" } " +
" ] " +
" } " +
" } " +
"}";
-}""";
JsonObject parsedResult = new Gson().fromJson(resultJson, JsonObject.class);
TokenAutocompleteAggregation myAutocompleteAggregation = new TokenAutocompleteAggregation("code", 22, null, null);

View File

@@ -97,7 +97,10 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchParam {
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_DATE")
@Column(name = "SP_ID")
private Long myId;
/**
* Composite of resourceType, paramName, and partition info if configured.
* Combined with the various date fields for a query.
* @since 3.5.0 - At some point this should be made not-null
*/
@Column(name = "HASH_IDENTITY", nullable = true)

View File

@@ -34,7 +34,6 @@ import org.apache.commons.lang3.builder.ToStringStyle;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
-import javax.persistence.FetchType;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;

View File

@@ -59,16 +59,12 @@ import static org.apache.commons.lang3.StringUtils.trim;
* IDX_SP_TOKEN_UNQUAL
*/
-// TODO PERF Recommend to drop this index (added by JA - I don't actually think we even need the identity hash for this type, we could potentially drop the column too):
-@Index(name = "IDX_SP_TOKEN_HASH", columnList = "HASH_IDENTITY"),
-@Index(name = "IDX_SP_TOKEN_HASH_S", columnList = "HASH_SYS"),
-@Index(name = "IDX_SP_TOKEN_HASH_SV", columnList = "HASH_SYS_AND_VALUE"),
-// TODO PERF change this to:
-// @Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE,RES_ID"),
-@Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE"),
-@Index(name = "IDX_SP_TOKEN_UPDATED", columnList = "SP_UPDATED"),
-@Index(name = "IDX_SP_TOKEN_RESID", columnList = "RES_ID")
@Index(name = "IDX_SP_TOKEN_HASH_V2", columnList = "HASH_IDENTITY,SP_SYSTEM,SP_VALUE,RES_ID,PARTITION_ID"),
@Index(name = "IDX_SP_TOKEN_HASH_S_V2", columnList = "HASH_SYS,RES_ID,PARTITION_ID"),
@Index(name = "IDX_SP_TOKEN_HASH_SV_V2", columnList = "HASH_SYS_AND_VALUE,RES_ID,PARTITION_ID"),
@Index(name = "IDX_SP_TOKEN_HASH_V_V2", columnList = "HASH_VALUE,RES_ID,PARTITION_ID"),
@Index(name = "IDX_SP_TOKEN_RESID_V2", columnList = "RES_ID,HASH_SYS_AND_VALUE,HASH_VALUE,HASH_SYS,HASH_IDENTITY,PARTITION_ID")
})
public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam {
@@ -112,11 +108,10 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchParam {
private Long myHashValue;
@ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {})
-@JoinColumn(foreignKey = @ForeignKey(name = "FK7ULX3J1GG3V7MAQREJGC7YBC4"),
@JoinColumn(foreignKey = @ForeignKey(name="FK_SP_TOKEN_RES"),
name = "RES_ID", referencedColumnName = "RES_ID", nullable = false)
private ResourceTable myResource;
/**
* Constructor
*/

View File

@@ -8,13 +8,18 @@ import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.stream.Collectors;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -30,6 +35,29 @@ public abstract class BaseDateSearchDaoTests {
*/
IIdType myObservationId;
Fixture myFixture;
//time zone set to EST
@BeforeEach
public void setTimeZoneEST() {
TimeZone.setDefault(TimeZone.getTimeZone("EST"));
}
//reset time zone back to match the system
@AfterEach
public void resetTimeZone() {
TimeZone.setDefault(null);
}
@BeforeEach
public void setupFixture() {
myFixture = constructFixture();
}
@AfterEach
public void cleanup() {
myFixture.cleanup();
}
/**
* Test for our date search operators.
* <p>
@@ -50,15 +78,14 @@ public abstract class BaseDateSearchDaoTests {
//@CsvSource("2019-12-31T08:00:00,eq2020,false,inline,1")
@MethodSource("dateSearchCases")
public void testDateSearchMatching(String theResourceDate, String theQuery, boolean theExpectedMatch, String theFileName, int theLineNumber) {
-Fixture fixture = getFixture();
if (isShouldSkip(theResourceDate, theQuery)) {
return;
}
// setup
-myObservationId = fixture.createObservationWithEffectiveDate(theResourceDate);
myObservationId = myFixture.createObservationWithEffectiveDate(theResourceDate);
// run the query
-boolean matched = fixture.isObservationSearchMatch(theQuery, myObservationId);
boolean matched = myFixture.isObservationSearchMatch(theQuery, myObservationId);
assertExpectedMatch(theResourceDate, theQuery, theExpectedMatch, matched, theFileName, theLineNumber);
}
@@ -91,7 +118,7 @@ public abstract class BaseDateSearchDaoTests {
*
* Use an abstract method instead of a constructor because JUnit has a such a funky lifecycle.
*/
-protected abstract Fixture getFixture();
protected abstract Fixture constructFixture();
public interface Fixture {
/**
@@ -104,11 +131,13 @@ public abstract class BaseDateSearchDaoTests {
*/
boolean isObservationSearchMatch(String theQuery, IIdType theObservationId);
void cleanup();
}
public static class TestDataBuilderFixture<O extends IBaseResource> implements Fixture {
final ITestDataBuilder myTestDataBuilder;
final IFhirResourceDao<O> myObservationDao;
final Set<IIdType> myCreatedIds = new HashSet<>();
public TestDataBuilderFixture(ITestDataBuilder theTestDataBuilder, IFhirResourceDao<O> theObservationDao) {
myTestDataBuilder = theTestDataBuilder;
@@ -117,7 +146,9 @@ public abstract class BaseDateSearchDaoTests {
@Override
public IIdType createObservationWithEffectiveDate(String theResourceDate) {
-return myTestDataBuilder.createObservation(myTestDataBuilder.withEffectiveDate(theResourceDate));
IIdType id = myTestDataBuilder.createObservation(myTestDataBuilder.withEffectiveDate(theResourceDate));
myCreatedIds.add(id);
return id;
}
@Override
@@ -131,5 +162,11 @@ public abstract class BaseDateSearchDaoTests {
boolean matched = results.getAllResourceIds().contains(theObservationId.getIdPart());
return matched;
}
@Override
public void cleanup() {
myCreatedIds.forEach(myObservationDao::delete);
myCreatedIds.clear();
}
}
}

View File

@@ -25,6 +25,7 @@ import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.util.FhirTerser;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -185,11 +186,18 @@ public interface ITestDataBuilder {
}
default Consumer<IBaseResource> withObservationCode(@Nullable String theSystem, @Nullable String theCode) {
return withObservationCode(theSystem, theCode, null);
}
default Consumer<IBaseResource> withObservationCode(@Nullable String theSystem, @Nullable String theCode, String theDisplay) {
return t -> {
FhirTerser terser = getFhirContext().newTerser();
IBase coding = terser.addElement(t, "code.coding");
terser.addElement(coding, "system", theSystem);
terser.addElement(coding, "code", theCode);
if (StringUtils.isNotEmpty(theDisplay)) {
terser.addElement(coding, "display", theDisplay);
}
};
}