From 77305eb5707d44497739f1895182bc2e5f4b8774 Mon Sep 17 00:00:00 2001
From: James Agnew
Date: Tue, 4 Sep 2018 10:32:08 +0800
Subject: [PATCH] Work on migrator

---
 .../uhn/fhir/jpa/entity/BaseHasResource.java | 4 +-
 .../BaseResourceIndexedSearchParam.java | 4 +-
 .../java/ca/uhn/fhir/jpa/entity/ForcedId.java | 4 +-
 .../fhir/jpa/entity/ResourceEncodingEnum.java | 16 +-
 .../fhir/jpa/entity/ResourceHistoryTable.java | 4 +-
 .../ResourceIndexedCompositeStringUnique.java | 4 +-
 .../ResourceIndexedSearchParamCoords.java | 6 +-
 .../ResourceIndexedSearchParamDate.java | 6 +-
 .../ResourceIndexedSearchParamNumber.java | 6 +-
 .../ResourceIndexedSearchParamQuantity.java | 7 +-
 .../ResourceIndexedSearchParamString.java | 4 +-
 .../ResourceIndexedSearchParamToken.java | 4 +-
 .../entity/ResourceIndexedSearchParamUri.java | 4 +-
 .../ca/uhn/fhir/jpa/entity/ResourceLink.java | 49 +--
 .../fhir/jpa/entity/ResourceSearchView.java | 56 ++--
 .../java/ca/uhn/fhir/jpa/entity/Search.java | 4 +-
 .../fhir/jpa/entity/SearchParamPresent.java | 5 +-
 .../ca/uhn/fhir/jpa/entity/TermConcept.java | 8 +-
 .../jpa/entity/TermConceptDesignation.java | 4 +-
 .../uhn/fhir/jpa/entity/TermConceptMap.java | 4 +-
 .../fhir/jpa/entity/TermConceptMapGroup.java | 6 +-
 .../entity/TermConceptMapGroupElement.java | 6 +-
 .../TermConceptMapGroupElementTarget.java | 6 +-
 .../fhir/jpa/entity/TermConceptProperty.java | 4 +-
 .../TermConceptPropertyFieldBridge.java | 4 +-
 .../ca/uhn/fhir/jpa/config/TestR4Config.java | 2 +-
 .../ca/uhn/fhir/jpa/migrate/JdbcUtils.java | 107 +++++-
 .../jpa/migrate/taskdef/AddColumnTask.java | 30 ++
 .../jpa/migrate/taskdef/AddIndexTask.java | 19 +-
 .../jpa/migrate/taskdef/ArbitrarySqlTask.java | 90 +++++
 .../migrate/taskdef/BaseTableColumnTask.java | 30 ++
 .../taskdef/BaseTableColumnTypeTask.java | 122 +++++++
 .../jpa/migrate/taskdef/BaseTableTask.java | 21 ++
 .../fhir/jpa/migrate/taskdef/BaseTask.java | 2 +-
 .../migrate/taskdef/CalculateHashesTask.java | 132 ++++++--
 .../jpa/migrate/taskdef/DropIndexTask.java | 66 ++--
 .../jpa/migrate/taskdef/ModifyColumnTask.java | 69 ++++
 .../tasks/HapiFhirJpaMigrationTasks.java | 312 ++++++++++++++++--
 .../jpa/migrate/taskdef/AddColumnTest.java | 44 +++
 .../jpa/migrate/taskdef/ArbitrarySqlTest.java | 53 +++
 .../fhir/jpa/migrate/taskdef/BaseTest.java | 14 +-
 .../jpa/migrate/taskdef/CreateHashesTest.java | 53 +++
 .../jpa/migrate/taskdef/ModifyColumnTest.java | 32 ++
 .../tasks/HapiFhirJpaMigrationTasksTest.java | 1 -
 .../ctx/DefaultProfileValidationSupport.java | 17 +-
 45 files changed, 1223 insertions(+), 222 deletions(-)
 create mode 100644 hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java
 create mode 100644 hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java
 create mode 100644 hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTask.java
 create mode 100644 hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java
 create mode 100644 hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableTask.java
 create mode 100644 hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java
 create mode 100644 hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTest.java
 create mode 100644 hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTest.java
 create mode 100644
hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java create mode 100644 hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTest.java diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseHasResource.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseHasResource.java index 8d8d270471c..1d7c40a5baf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseHasResource.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseHasResource.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java index 97890d45032..6ac896c89a7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BaseResourceIndexedSearchParam.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ForcedId.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ForcedId.java index 0c451533c70..0ce58be1322 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ForcedId.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ForcedId.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceEncodingEnum.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceEncodingEnum.java index d7b00c15277..19e965d5e44 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceEncodingEnum.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceEncodingEnum.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -34,10 +34,14 @@ public enum ResourceEncodingEnum { * See ResourceHistoryTable RES_ENCODING column */ - /** Json */ + /** + * Json + */ JSON, - - /** Json Compressed */ + + /** + * Json Compressed + */ JSONC, /** @@ -48,5 +52,5 @@ public enum ResourceEncodingEnum { public IParser newParser(FhirContext theContext) { return theContext.newJsonParser(); } - + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceHistoryTable.java index 19e66707ba6..972d4d2f36a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceHistoryTable.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceHistoryTable.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedCompositeStringUnique.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedCompositeStringUnique.java index 0da8a00bc3e..dd25b7e6663 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedCompositeStringUnique.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedCompositeStringUnique.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamCoords.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamCoords.java index 95bb57d1cb3..c27a86e4f8d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamCoords.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamCoords.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -32,7 +32,7 @@ import javax.persistence.*; @Embeddable @Entity @Table(name = "HFJ_SPIDX_COORDS", indexes = { - @Index(name = "IDX_SP_COORDS_HASH", columnList = "HASH_IDENTITY,SP_VALUE,SP_LATITUDE,SP_LONGITUDE"), + @Index(name = "IDX_SP_COORDS_HASH", columnList = "HASH_IDENTITY,SP_LATITUDE,SP_LONGITUDE"), @Index(name = "IDX_SP_COORDS_UPDATED", columnList = "SP_UPDATED"), @Index(name = "IDX_SP_COORDS_RESID", columnList = "RES_ID") }) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamDate.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamDate.java index 63e54be4b30..31956ce0573 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamDate.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamDate.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -37,7 +37,7 @@ import java.util.Date; @Embeddable @Entity @Table(name = "HFJ_SPIDX_DATE", indexes = { -// @Index(name = "IDX_SP_DATE", columnList = "RES_TYPE,SP_NAME,SP_VALUE_LOW,SP_VALUE_HIGH"), + // We previously had an index called IDX_SP_DATE - Dont reuse @Index(name = "IDX_SP_DATE_HASH", columnList = "HASH_IDENTITY,SP_VALUE_LOW,SP_VALUE_HIGH"), @Index(name = "IDX_SP_DATE_UPDATED", columnList = "SP_UPDATED"), @Index(name = "IDX_SP_DATE_RESID", columnList = "RES_ID") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamNumber.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamNumber.java index 71d5f0b56fb..6a8adab9aeb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamNumber.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamNumber.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -37,7 +37,7 @@ import java.math.BigDecimal; @Embeddable @Entity @Table(name = "HFJ_SPIDX_NUMBER", indexes = { -// @Index(name = "IDX_SP_NUMBER", columnList = "RES_TYPE,SP_NAME,SP_VALUE"), +// We used to have an index with name IDX_SP_NUMBER - Dont reuse @Index(name = "IDX_SP_NUMBER_HASH_VAL", columnList = "HASH_IDENTITY,SP_VALUE"), @Index(name = "IDX_SP_NUMBER_UPDATED", columnList = "SP_UPDATED"), @Index(name = "IDX_SP_NUMBER_RESID", columnList = "RES_ID") diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamQuantity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamQuantity.java index 4790a781605..2ba9f580b5f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamQuantity.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamQuantity.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,9 +38,10 @@ import java.math.BigDecimal; @Embeddable @Entity @Table(name = "HFJ_SPIDX_QUANTITY", indexes = { -// @Index(name = "IDX_SP_QUANTITY", columnList = "RES_TYPE,SP_NAME,SP_SYSTEM,SP_UNITS,SP_VALUE"), +// We used to have an index named IDX_SP_QUANTITY - Dont reuse @Index(name = "IDX_SP_QUANTITY_HASH", columnList = "HASH_IDENTITY,SP_VALUE"), @Index(name = "IDX_SP_QUANTITY_HASH_UN", columnList = "HASH_IDENTITY_AND_UNITS,SP_VALUE"), + @Index(name = "IDX_SP_QUANTITY_HASH_SYSUN", columnList = "HASH_IDENTITY_SYS_UNITS,SP_VALUE"), @Index(name = "IDX_SP_QUANTITY_UPDATED", columnList = "SP_UPDATED"), @Index(name = "IDX_SP_QUANTITY_RESID", columnList = "RES_ID") }) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamString.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamString.java index 1ee6dfccd46..1f281f408ae 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamString.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamString.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamToken.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamToken.java index 75938fb79e8..adfef9efe4e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamToken.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamToken.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamUri.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamUri.java index 6f35d8809e9..df9d6baaeb1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamUri.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceIndexedSearchParamUri.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceLink.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceLink.java index 8f027ce5276..ff8ef70c96a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceLink.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceLink.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,36 +20,21 @@ package ca.uhn.fhir.jpa.entity; * #L% */ -import java.io.Serializable; -import java.util.Date; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.ForeignKey; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.SequenceGenerator; -import javax.persistence.Table; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; - import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.hibernate.annotations.ColumnDefault; import org.hibernate.search.annotations.Field; import org.hl7.fhir.instance.model.api.IIdType; +import javax.persistence.*; +import java.io.Serializable; +import java.util.Date; + @Entity -@Table(name = "HFJ_RES_LINK" , indexes= { - @Index(name="IDX_RL_TPATHRES", columnList= "SRC_PATH,TARGET_RESOURCE_ID"), - @Index(name="IDX_RL_SRC", columnList= "SRC_RESOURCE_ID"), - @Index(name="IDX_RL_DEST", columnList= "TARGET_RESOURCE_ID") +@Table(name = "HFJ_RES_LINK", indexes = { + @Index(name = "IDX_RL_TPATHRES", columnList = "SRC_PATH,TARGET_RESOURCE_ID"), + @Index(name = "IDX_RL_SRC", columnList = "SRC_RESOURCE_ID"), + @Index(name = "IDX_RL_DEST", columnList = "TARGET_RESOURCE_ID") }) public class ResourceLink implements Serializable { @@ -64,30 +49,30 @@ public class ResourceLink implements Serializable { @Column(name = "SRC_PATH", length = 100, nullable = false) private String mySourcePath; - @ManyToOne(optional = false, fetch=FetchType.LAZY) - @JoinColumn(name = "SRC_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey=@ForeignKey(name="FK_RESLINK_SOURCE")) + @ManyToOne(optional = false, fetch = FetchType.LAZY) + @JoinColumn(name = "SRC_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = false, foreignKey = @ForeignKey(name = "FK_RESLINK_SOURCE")) private ResourceTable mySourceResource; @Column(name = "SRC_RESOURCE_ID", insertable = false, updatable = false, nullable = false) private Long mySourceResourcePid; - @Column(name = "SOURCE_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN) + @Column(name = "SOURCE_RESOURCE_TYPE", nullable = false, length = ResourceTable.RESTYPE_LEN) @Field() private String mySourceResourceType; - @ManyToOne(optional = true, fetch=FetchType.LAZY) - @JoinColumn(name = "TARGET_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = true, foreignKey=@ForeignKey(name="FK_RESLINK_TARGET")) + @ManyToOne(optional = true, fetch = FetchType.LAZY) + @JoinColumn(name = "TARGET_RESOURCE_ID", referencedColumnName = "RES_ID", nullable = true, foreignKey = @ForeignKey(name = "FK_RESLINK_TARGET")) private ResourceTable myTargetResource; @Column(name = "TARGET_RESOURCE_ID", insertable = false, updatable = false, nullable = true) @Field() private Long myTargetResourcePid; - @Column(name = "TARGET_RESOURCE_TYPE", nullable=false, length=ResourceTable.RESTYPE_LEN) + @Column(name = "TARGET_RESOURCE_TYPE", nullable = false, length = ResourceTable.RESTYPE_LEN) @Field() private String myTargetResourceType; - @Column(name = "TARGET_RESOURCE_URL", length=200, nullable = true) + @Column(name = "TARGET_RESOURCE_URL", length = 200, nullable = true) @Field() private String myTargetResourceUrl; diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java index e62712ebbe7..c5700caa51a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceSearchView.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,44 +19,36 @@ package ca.uhn.fhir.jpa.entity; * limitations under the License. * #L% */ -import java.io.Serializable; -import java.util.Date; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.Id; -import javax.persistence.Lob; -import javax.persistence.Temporal; -import javax.persistence.TemporalType; - -import org.hibernate.annotations.Immutable; -import org.hibernate.annotations.Subselect; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.rest.api.Constants; +import org.hibernate.annotations.Immutable; +import org.hibernate.annotations.Subselect; + +import javax.persistence.*; +import java.io.Serializable; +import java.util.Date; //@formatter:off @Entity @Immutable -@Subselect("SELECT h.pid as pid " + - ", h.res_id as res_id " + - ", h.res_type as res_type " + - ", h.res_version as res_version " + // FHIR version - ", h.res_ver as res_ver " + // resource version - ", h.has_tags as has_tags " + - ", h.res_deleted_at as res_deleted_at " + - ", h.res_published as res_published " + - ", h.res_updated as res_updated " + - ", h.res_text as res_text " + - ", h.res_encoding as res_encoding " + - ", f.forced_id as forced_pid " + - "FROM HFJ_RES_VER h " - + " LEFT OUTER JOIN HFJ_FORCED_ID f ON f.resource_pid = h.res_id " - + " INNER JOIN HFJ_RESOURCE r ON r.res_id = h.res_id and r.res_ver = h.res_ver") +@Subselect("SELECT h.pid as pid " + + ", h.res_id as res_id " + + ", h.res_type as res_type " + + ", h.res_version as res_version " + // FHIR version + ", h.res_ver as res_ver " + // resource version + ", h.has_tags as has_tags " + + ", h.res_deleted_at as res_deleted_at " + + ", h.res_published as res_published " + + ", h.res_updated as res_updated " + + ", h.res_text as res_text " + + ", h.res_encoding as res_encoding " + + ", f.forced_id as forced_pid " + + "FROM HFJ_RES_VER h " + + " LEFT OUTER JOIN HFJ_FORCED_ID f ON f.resource_pid = h.res_id " + + " INNER JOIN HFJ_RESOURCE r ON r.res_id = h.res_id and r.res_ver = h.res_ver") // @formatter:on public class ResourceSearchView implements IBaseResourceEntity, Serializable { @@ -142,7 +134,7 @@ public class ResourceSearchView implements IBaseResourceEntity, Serializable { return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); } else { return new IdDt( - getResourceType() + '/' + getForcedId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion()); + getResourceType() + '/' + getForcedId() + '/' + 
Constants.PARAM_HISTORY + '/' + getVersion()); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java index d3ed16c910b..ee0f074c6f7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java @@ -19,9 +19,9 @@ import static org.apache.commons.lang3.StringUtils.left; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchParamPresent.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchParamPresent.java index d5245966225..d6910942929 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchParamPresent.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/SearchParamPresent.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,6 +28,7 @@ import java.io.Serializable; @Entity @Table(name = "HFJ_RES_PARAM_PRESENT", indexes = { + // We used to have a constraint named IDX_RESPARMPRESENT_SPID_RESID - Don't reuse @Index(name = "IDX_RESPARMPRESENT_RESID", columnList = "RES_ID"), @Index(name = "IDX_RESPARMPRESENT_HASHPRES", columnList = "HASH_PRESENCE") }) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java index 1b501d9e1c8..ddf2c1f57b3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java @@ -29,9 +29,9 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -93,15 +93,17 @@ public class TermConcept implements Serializable { private Long myIndexStatus; @Field(name = "myParentPids", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "conceptParentPidsAnalyzer")) @Lob - @Column(name="PARENT_PIDS", nullable = true) + @Column(name = "PARENT_PIDS", nullable = true) private String myParentPids; @OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myChild") private Collection myParents; @Column(name = "CODE_SEQUENCE", nullable = true) private Integer mySequence; + public TermConcept() { super(); } + public TermConcept(TermCodeSystemVersion theCs, String theCode) { setCodeSystemVersion(theCs); setCode(theCode); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java index bd99a8c3196..295b5eb18e4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMap.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMap.java index 2589a6bd990..fc87b952b46 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMap.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMap.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroup.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroup.java index 1d1bd26ad00..3056768843f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroup.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroup.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -38,7 +38,7 @@ public class TermConceptMapGroup implements Serializable { private Long myId; @ManyToOne() - @JoinColumn(name = "CONCEPT_MAP_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGROUP_CONCEPTMAP")) + @JoinColumn(name = "CONCEPT_MAP_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGROUP_CONCEPTMAP")) private TermConceptMap myConceptMap; @Column(name = "SOURCE_URL", nullable = false, length = 200) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElement.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElement.java index 5045f21ddfe..b9a67e44979 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElement.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElement.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -42,7 +42,7 @@ public class TermConceptMapGroupElement implements Serializable { private Long myId; @ManyToOne() - @JoinColumn(name = "CONCEPT_MAP_GROUP_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGELEMENT_GROUP")) + @JoinColumn(name = "CONCEPT_MAP_GROUP_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGELEMENT_GROUP")) private TermConceptMapGroup myConceptMapGroup; @Column(name = "SOURCE_CODE", nullable = false, length = TermConcept.CODE_LENGTH) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElementTarget.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElementTarget.java index df48504e6e8..cfa0a5f7625 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElementTarget.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptMapGroupElementTarget.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -41,7 +41,7 @@ public class TermConceptMapGroupElementTarget implements Serializable { private Long myId; @ManyToOne() - @JoinColumn(name = "CONCEPT_MAP_GRP_ELM_PID", nullable = false, referencedColumnName = "PID", foreignKey=@ForeignKey(name="FK_TCMGETARGET_ELEMENT")) + @JoinColumn(name = "CONCEPT_MAP_GRP_ELM_PID", nullable = false, referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_TCMGETARGET_ELEMENT")) private TermConceptMapGroupElement myConceptMapGroupElement; @Column(name = "TARGET_CODE", nullable = false, length = TermConcept.CODE_LENGTH) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java index e5aabc32a43..bf568bfb1c0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyFieldBridge.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyFieldBridge.java index 72641bdce07..30e822e0525 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyFieldBridge.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyFieldBridge.java @@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity; * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java index 943fc6b0451..5557e57e067 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java @@ -105,7 +105,7 @@ public class TestR4Config extends BaseJavaConfigR4 { DataSource dataSource = ProxyDataSourceBuilder .create(retVal) -// .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") + .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL") .logSlowQueryBySlf4j(10, TimeUnit.SECONDS) .countQuery(new ThreadQueryCountHolder()) .build(); diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java index 001d143e816..bcd4948182d 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/JdbcUtils.java @@ -1,17 +1,22 @@ package ca.uhn.fhir.jpa.migrate; +import ca.uhn.fhir.jpa.migrate.taskdef.BaseTableColumnTypeTask; import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.jdbc.core.ColumnMapRowMapper; +import org.thymeleaf.util.StringUtils; import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; +import java.sql.*; import java.util.HashSet; +import java.util.Locale; import java.util.Objects; import java.util.Set; public class JdbcUtils { + private static final Logger ourLog = LoggerFactory.getLogger(JdbcUtils.class); + /** * Retrieve all index names */ @@ -19,14 +24,18 @@ public class JdbcUtils { DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); Connection connection = dataSource.getConnection(); return theConnectionProperties.getTxTemplate().execute(t -> { - DatabaseMetaData metadata = null; + DatabaseMetaData metadata; try { metadata = connection.getMetaData(); ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, false); Set indexNames = new HashSet<>(); while (indexes.next()) { + + ourLog.debug("*** Next index: {}", new ColumnMapRowMapper().mapRow(indexes, 0)); + String indexName = indexes.getString("INDEX_NAME"); + indexName = StringUtils.toUpperCase(indexName, Locale.US); indexNames.add(indexName); } @@ -37,4 +46,92 @@ public class JdbcUtils { }); } + + @SuppressWarnings("ConstantConditions") + public static boolean isIndexUnique(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theIndexName) throws SQLException { + DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); + Connection connection = dataSource.getConnection(); + return theConnectionProperties.getTxTemplate().execute(t -> { + DatabaseMetaData metadata; + try { + metadata = connection.getMetaData(); + ResultSet indexes = metadata.getIndexInfo(null, null, theTableName, false, false); + + while (indexes.next()) { + String indexName = indexes.getString("INDEX_NAME"); + if (indexName.equalsIgnoreCase(theIndexName)) { + boolean nonUnique = indexes.getBoolean("NON_UNIQUE"); + return !nonUnique; + } + } + + } catch (SQLException e) { + throw new InternalErrorException(e); + } + + throw new InternalErrorException("Can't find index: 
" + theIndexName + " on table " + theTableName); + }); + + } + + /** + * Retrieve all index names + */ + public static String getColumnType(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName, String theColumnName) throws SQLException { + DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); + try (Connection connection = dataSource.getConnection()) { + return theConnectionProperties.getTxTemplate().execute(t -> { + DatabaseMetaData metadata; + try { + metadata = connection.getMetaData(); + ResultSet indexes = metadata.getColumns(null, null, theTableName, theColumnName); + + indexes.next(); + + int dataType = indexes.getInt("DATA_TYPE"); + Long length = indexes.getLong("COLUMN_SIZE"); + switch (dataType) { + case Types.VARCHAR: + return BaseTableColumnTypeTask.ColumnTypeEnum.STRING.getDescriptor(length); + case Types.BIGINT: + return BaseTableColumnTypeTask.ColumnTypeEnum.LONG.getDescriptor(length); + default: + throw new IllegalArgumentException("Don't know how to handle datatype: " + dataType); + } + + } catch (SQLException e) { + throw new InternalErrorException(e); + } + + }); + } + } + + /** + * Retrieve all index names + */ + public static Set getColumnNames(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theTableName) throws SQLException { + DataSource dataSource = Objects.requireNonNull(theConnectionProperties.getDataSource()); + Connection connection = dataSource.getConnection(); + return theConnectionProperties.getTxTemplate().execute(t -> { + DatabaseMetaData metadata; + try { + metadata = connection.getMetaData(); + ResultSet indexes = metadata.getColumns(null, null, theTableName, null); + + Set columnNames = new HashSet<>(); + while (indexes.next()) { + String columnName = indexes.getString("COLUMN_NAME"); + columnName = StringUtils.toUpperCase(columnName, Locale.US); + columnNames.add(columnName); + } + + return columnNames; + } catch (SQLException e) { + throw new InternalErrorException(e); + } + }); + + } + } diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java new file mode 100644 index 00000000000..291e511e51a --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTask.java @@ -0,0 +1,30 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.migrate.JdbcUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.SQLException; +import java.util.Set; + +public class AddColumnTask extends BaseTableColumnTypeTask { + + private static final Logger ourLog = LoggerFactory.getLogger(AddColumnTask.class); + + + @Override + public void execute() throws SQLException { + Set columnNames = JdbcUtils.getColumnNames(getConnectionProperties(), getTableName()); + if (columnNames.contains(getColumnName())) { + ourLog.info("Column {} already exists on table {} - No action performed", getColumnName(), getTableName()); + return; + } + + String type = getSqlType(); + String nullable = getSqlNotNull(); + String sql = "alter table " + getTableName() + " add column " + getColumnName() + " " + type + " " + nullable; + ourLog.info("Adding column {} of type {} to table {}", getColumnName(), type, getTableName()); + executeSql(sql); + } + +} diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java 
b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java index 60e668d52b1..a90aceca215 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/AddIndexTask.java @@ -4,26 +4,23 @@ import ca.uhn.fhir.jpa.migrate.JdbcUtils; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.thymeleaf.util.StringUtils; import java.sql.SQLException; import java.util.Arrays; import java.util.List; +import java.util.Locale; import java.util.Set; -public class AddIndexTask extends BaseTask { +public class AddIndexTask extends BaseTableTask { private static final Logger ourLog = LoggerFactory.getLogger(AddIndexTask.class); - private String myTableName; private String myIndexName; private List myColumns; private Boolean myUnique; - public void setTableName(String theTableName) { - myTableName = theTableName; - } - public void setIndexName(String theIndexName) { - myIndexName = theIndexName; + myIndexName = StringUtils.toUpperCase(theIndexName, Locale.US); } public void setColumns(List theColumns) { @@ -36,23 +33,23 @@ public class AddIndexTask extends BaseTask { @Override public void validate() { + super.validate(); Validate.notBlank(myIndexName, "Index name not specified"); - Validate.notBlank(myTableName, "Table name not specified"); Validate.isTrue(myColumns.size() > 0, "Columns not specified"); Validate.notNull(myUnique, "Uniqueness not specified"); } @Override public void execute() throws SQLException { - Set indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), myTableName); + Set indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName()); if (indexNames.contains(myIndexName)) { - ourLog.info("Index {} already exists on table {} - No action performed", myIndexName, myTableName); + ourLog.info("Index {} already exists on table {} - No action performed", myIndexName, getTableName()); return; } String unique = myUnique ? 
"UNIQUE " : ""; String columns = String.join(", ", myColumns); - String sql = "CREATE " + unique + " INDEX " + myIndexName + " ON " + myTableName + "(" + columns + ")"; + String sql = "CREATE " + unique + " INDEX " + myIndexName + " ON " + getTableName() + "(" + columns + ")"; executeSql(sql); } diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java new file mode 100644 index 00000000000..eddfa0a41b1 --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTask.java @@ -0,0 +1,90 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.jdbc.core.ColumnMapRowMapper; +import org.springframework.jdbc.core.JdbcTemplate; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +public class ArbitrarySqlTask extends BaseTask { + + private static final Logger ourLog = LoggerFactory.getLogger(ArbitrarySqlTask.class); + private final String myDescription; + private List myTask = new ArrayList<>(); + private int myBatchSize = 1000; + + public ArbitrarySqlTask(String theDescription) { + myDescription = theDescription; + } + + public void addQuery(String theSql, QueryModeEnum theMode, Consumer> theConsumer) { + myTask.add(new QueryTask(theSql, theMode, theConsumer)); + } + + @Override + public void validate() { + // nothing + } + + @Override + public void execute() throws SQLException { + ourLog.info("Starting: {}", myDescription); + + for (Task next : myTask) { + next.execute(); + } + + } + + public void setBatchSize(int theBatchSize) { + myBatchSize = theBatchSize; + } + + public enum QueryModeEnum { + BATCH_UNTIL_NO_MORE + } + + private abstract class Task { + public abstract void execute(); + } + + private class QueryTask extends Task { + private final String mySql; + private final QueryModeEnum myMode; + private final Consumer> myConsumer; + + public QueryTask(String theSql, QueryModeEnum theMode, Consumer> theConsumer) { + mySql = theSql; + myMode = theMode; + myConsumer = theConsumer; + } + + + @Override + public void execute() { + List> rows; + do { + ourLog.info("Querying for up to {} rows", myBatchSize); + rows = getTxTemplate().execute(t -> { + JdbcTemplate jdbcTemplate = newJdbcTemnplate(); + jdbcTemplate.setMaxRows(myBatchSize); + return jdbcTemplate.query(mySql, new ColumnMapRowMapper()); + }); + + ourLog.info("Processing {} rows", rows.size()); + List> finalRows = rows; + getTxTemplate().execute(t -> { + for (Map nextRow : finalRows) { + myConsumer.accept(nextRow); + } + return null; + }); + } while (rows.size() > 0); + } + } +} diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTask.java new file mode 100644 index 00000000000..a37318bc1bb --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTask.java @@ -0,0 +1,30 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import org.apache.commons.lang3.Validate; +import org.thymeleaf.util.StringUtils; + +import java.util.Locale; + +public abstract class BaseTableColumnTask extends BaseTableTask { + + private String myColumnName; + + @SuppressWarnings("unchecked") + public T 
setColumnName(String theColumnName) { + myColumnName = StringUtils.toUpperCase(theColumnName, Locale.US); + return (T) this; + } + + + public String getColumnName() { + return myColumnName; + } + + @Override + public void validate() { + super.validate(); + Validate.notBlank(myColumnName, "Column name not specified"); + } + + +} diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java new file mode 100644 index 00000000000..0564551d257 --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableColumnTypeTask.java @@ -0,0 +1,122 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.migrate.DriverTypeEnum; +import org.apache.commons.lang3.Validate; +import org.springframework.util.Assert; + +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +public abstract class BaseTableColumnTypeTask extends BaseTableColumnTask { + + private ColumnTypeEnum myColumnType; + private Map> myColumnTypeToDriverTypeToSqlType = new HashMap<>(); + private Boolean myNullable; + private Long myColumnLength; + + /** + * Constructor + */ + BaseTableColumnTypeTask() { + setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.DERBY_EMBEDDED, "bigint"); + setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.MARIADB_10_1, "bigint"); + setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.MYSQL_5_7, "bigint"); + setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.MSSQL_2012, "bigint"); + setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.ORACLE_12C, "number(19,0)"); + setColumnType(ColumnTypeEnum.LONG, DriverTypeEnum.POSTGRES_9_4, "int8"); + + setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.DERBY_EMBEDDED, "varchar(?)"); + setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.MARIADB_10_1, "varchar(?)"); + setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.MYSQL_5_7, "varchar(?)"); + setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.MSSQL_2012, "varchar(?)"); + setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.ORACLE_12C, "varchar2(?)"); + setColumnType(ColumnTypeEnum.STRING, DriverTypeEnum.POSTGRES_9_4, "varchar(?)"); + } + + public ColumnTypeEnum getColumnType() { + return myColumnType; + } + + private void setColumnType(ColumnTypeEnum theColumnType, DriverTypeEnum theDriverType, String theColumnTypeSql) { + Map columnSqlType = myColumnTypeToDriverTypeToSqlType.computeIfAbsent(theColumnType, k -> new HashMap<>()); + if (columnSqlType.containsKey(theDriverType)) { + throw new IllegalStateException("Duplicate key: " + theDriverType); + } + columnSqlType.put(theDriverType, theColumnTypeSql); + } + + + @Override + public void validate() { + super.validate(); + Validate.notNull(myColumnType); + Validate.notNull(myNullable); + + if (myColumnType == ColumnTypeEnum.STRING) { + Validate.notNull(myColumnLength); + } else { + Validate.isTrue(myColumnLength == null); + } + } + + @SuppressWarnings("unchecked") + public T setColumnType(ColumnTypeEnum theColumnType) { + myColumnType = theColumnType; + return (T) this; + } + + protected String getSqlType() { + String retVal = myColumnTypeToDriverTypeToSqlType.get(myColumnType).get(getDriverType()); + Objects.requireNonNull(retVal); + + if (myColumnType == ColumnTypeEnum.STRING) { + retVal = retVal.replace("?", Long.toString(getColumnLength())); + } + + return retVal; + } + + public boolean isNullable() { + return myNullable; + } + + public void 
setNullable(boolean theNullable) { + myNullable = theNullable; + } + + protected String getSqlNotNull() { + return isNullable() ? "" : " not null"; + } + + public Long getColumnLength() { + return myColumnLength; + } + + public void setColumnLength(int theColumnLength) { + myColumnLength = (long) theColumnLength; + } + + + public enum ColumnTypeEnum { + + LONG { + @Override + public String getDescriptor(Long theColumnLength) { + Assert.isTrue(theColumnLength == null, "Must not supply a column length"); + return "bigint"; + } + }, + STRING { + @Override + public String getDescriptor(Long theColumnLength) { + Assert.isTrue(theColumnLength != null, "Must supply a column length"); + return "varchar(" + theColumnLength + ")"; + } + }; + + public abstract String getDescriptor(Long theColumnLength); + + } + +} diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableTask.java new file mode 100644 index 00000000000..a46b6a82ac8 --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTableTask.java @@ -0,0 +1,21 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import org.apache.commons.lang3.Validate; + +public abstract class BaseTableTask extends BaseTask { + private String myTableName; + + public String getTableName() { + return myTableName; + } + + public T setTableName(String theTableName) { + myTableName = theTableName; + return (T) this; + } + + @Override + public void validate() { + Validate.notBlank(myTableName); + } +} diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java index 687aed0fdc7..24d30301c41 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTask.java @@ -34,7 +34,7 @@ public abstract class BaseTask { Integer changes = getConnectionProperties().getTxTemplate().execute(t -> { JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate(); int changesCount = jdbcTemplate.update(theSql, theArguments); - ourLog.info("SQL {} returned {}", theSql, changesCount); + ourLog.info("SQL \"{}\" returned {}", theSql, changesCount); return changesCount; }); diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java index c2beae48234..7405cec29d7 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/CalculateHashesTask.java @@ -1,57 +1,129 @@ package ca.uhn.fhir.jpa.migrate.taskdef; +import ca.uhn.fhir.util.StopWatch; +import com.google.common.collect.ForwardingMap; import org.apache.commons.lang3.Validate; +import org.checkerframework.checker.nullness.compatqual.NullableDecl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.core.ResultSetExtractor; -import org.springframework.jdbc.core.RowMapper; -import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Consumer; import 
java.util.function.Function;
-public class CalculateHashesTask extends BaseTask {
+public class CalculateHashesTask extends BaseTableColumnTask<CalculateHashesTask> {
-	private String myTableName;
-	private String myColumnName;
+	private static final Logger ourLog = LoggerFactory.getLogger(CalculateHashesTask.class);
+	private int myBatchSize = 10000;
+	private Map<String, Function<MandatoryKeyMap<String, Object>, Long>> myCalculators = new HashMap<>();
-	public void setTableName(String theTableName) {
-		myTableName = theTableName;
+	public void setBatchSize(int theBatchSize) {
+		myBatchSize = theBatchSize;
 	}
-	public void setColumnName(String theColumnName) {
-		myColumnName = theColumnName;
-	}
-
-	@Override
-	public void validate() {
-		Validate.notBlank(myTableName);
-		Validate.notBlank(myColumnName);
-	}
 	@Override
 	public void execute() {
-		List<Map<String, Object>> rows = getTxTemplate().execute(t->{
-			JdbcTemplate jdbcTemplate = newJdbcTemnplate();
-			int batchSize = 10000;
-			jdbcTemplate.setMaxRows(batchSize);
-			String sql = "SELECT * FROM " + myTableName + " WHERE " + myColumnName + " IS NULL";
-			ourLog.info("Loading up to {} rows in {} with no hashes", batchSize, myTableName);
-			return jdbcTemplate.queryForList(sql);
+		List<Map<String, Object>> rows;
+		do {
+			rows = getTxTemplate().execute(t -> {
+				JdbcTemplate jdbcTemplate = newJdbcTemnplate();
+				jdbcTemplate.setMaxRows(myBatchSize);
+				String sql = "SELECT * FROM " + getTableName() + " WHERE " + getColumnName() + " IS NULL";
+				ourLog.info("Finding up to {} rows in {} that requires hashes", myBatchSize, getTableName());
+				return jdbcTemplate.queryForList(sql);
+			});
+
+			updateRows(rows);
+		} while (rows.size() > 0);
+	}
+
+	private void updateRows(List<Map<String, Object>> theRows) {
+		StopWatch sw = new StopWatch();
+		getTxTemplate().execute(t -> {
+
+			// Loop through rows
+			assert theRows != null;
+			for (Map<String, Object> nextRow : theRows) {
+
+				Map<String, Long> newValues = new HashMap<>();
+				MandatoryKeyMap<String, Object> nextRowMandatoryKeyMap = new MandatoryKeyMap<>(nextRow);
+
+				// Apply calculators
+				for (Map.Entry<String, Function<MandatoryKeyMap<String, Object>, Long>> nextCalculatorEntry : myCalculators.entrySet()) {
+					String nextColumn = nextCalculatorEntry.getKey();
+					Function<MandatoryKeyMap<String, Object>, Long> nextCalculator = nextCalculatorEntry.getValue();
+					Long value = nextCalculator.apply(nextRowMandatoryKeyMap);
+					newValues.put(nextColumn, value);
+				}
+
+				// Generate update SQL
+				StringBuilder sqlBuilder = new StringBuilder();
+				List<Object> arguments = new ArrayList<>();
+				sqlBuilder.append("UPDATE ");
+				sqlBuilder.append(getTableName());
+				sqlBuilder.append(" SET ");
+				for (Map.Entry<String, Long> nextNewValueEntry : newValues.entrySet()) {
+					if (arguments.size() > 0) {
+						sqlBuilder.append(", ");
+					}
+					sqlBuilder.append(nextNewValueEntry.getKey()).append(" = ?");
+					arguments.add(nextNewValueEntry.getValue());
+				}
+				sqlBuilder.append(" WHERE SP_ID = ?");
+				arguments.add((Long) nextRow.get("SP_ID"));
+
+				// Apply update SQL
+				newJdbcTemnplate().update(sqlBuilder.toString(), arguments.toArray());
+
+			}
+
+			return theRows.size();
 		});
+		ourLog.info("Updated {} rows on {} in {}", theRows.size(), getTableName(), sw.toString());
+	}
-
+	public CalculateHashesTask addCalculator(String theColumnName, Function<MandatoryKeyMap<String, Object>, Long> theConsumer) {
+		Validate.isTrue(myCalculators.containsKey(theColumnName) == false);
+		myCalculators.put(theColumnName, theConsumer);
+		return this;
 	}
-	private Map<String, Function<Map<String, Object>, Long>> myColumnMappers;
+	public static class MandatoryKeyMap<K, V> extends ForwardingMap<K, V> {
-	public void addCalculator(String theColumnName, Function<Map<String, Object>, Long> theConsumer) {
+		private final Map<K, V> myWrap;
+		public MandatoryKeyMap(Map<K, V> theWrap) {
+			myWrap = theWrap;
+		}
+
+		@Override
+		public V get(@NullableDecl Object 
theKey) {
+			if (!containsKey(theKey)) {
+				throw new IllegalArgumentException("No key: " + theKey);
+			}
+			return super.get(theKey);
+		}
+
+		public String getString(String theKey) {
+			return (String) get(theKey);
+		}
+
+		@Override
+		protected Map<K, V> delegate() {
+			return myWrap;
+		}
+
+		public String getResourceType() {
+			return getString("RES_TYPE");
+		}
+
+		public String getParamName() {
+			return getString("SP_NAME");
+		}
 	}
-
-	private static final Logger ourLog = LoggerFactory.getLogger(CalculateHashesTask.class);
 }
diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIndexTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIndexTask.java
index bceea5417ea..62d5c50682a 100644
--- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIndexTask.java
+++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/DropIndexTask.java
@@ -8,58 +8,74 @@ import org.slf4j.LoggerFactory;
 import java.sql.SQLException;
 import java.util.Set;
-public class DropIndexTask extends BaseTask {
+public class DropIndexTask extends BaseTableTask<DropIndexTask> {
 	private static final Logger ourLog = LoggerFactory.getLogger(DropIndexTask.class);
 	private String myIndexName;
-	private String myTableName;
 	@Override
 	public void validate() {
+		super.validate();
 		Validate.notBlank(myIndexName, "The index name must not be blank");
-		Validate.notBlank(myTableName, "The table name must not be blank");
 		if (getDescription() == null) {
-			setDescription("Drop index " + myIndexName + " on table " + myTableName);
+			setDescription("Drop index " + myIndexName + " on table " + getTableName());
 		}
 	}
 	@Override
 	public void execute() throws SQLException {
-		Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), myTableName);
+		Set<String> indexNames = JdbcUtils.getIndexNames(getConnectionProperties(), getTableName());
 		if (!indexNames.contains(myIndexName)) {
-			ourLog.info("Index {} does not exist on table {} - No action needed", myIndexName, myTableName);
+			ourLog.info("Index {} does not exist on table {} - No action needed", myIndexName, getTableName());
 			return;
 		}
-		ourLog.info("Dropping index {} on table {}", myIndexName, myTableName);
+		boolean isUnique = JdbcUtils.isIndexUnique(getConnectionProperties(), getTableName(), myIndexName);
+		String uniquenessString = isUnique ? "unique" : "non-unique";
+		ourLog.info("Dropping {} index {} on table {}", uniquenessString, myIndexName, getTableName());
 		String sql = null;
-		switch (getDriverType()) {
-			case MYSQL_5_7:
-			case MARIADB_10_1:
-				sql = "ALTER TABLE " + myTableName + " DROP INDEX " + myIndexName;
-				break;
-			case POSTGRES_9_4:
-			case DERBY_EMBEDDED:
-			case ORACLE_12C:
-				sql = "DROP INDEX " + myIndexName;
-				break;
-			case MSSQL_2012:
-				sql = "DROP INDEX " + myTableName + "." 
+ myIndexName; - break; + + if (isUnique) { + // Drop constraint + switch (getDriverType()) { + case MYSQL_5_7: + case MARIADB_10_1: + sql = "ALTER TABLE " + getTableName() + " DROP INDEX " + myIndexName; + break; + case DERBY_EMBEDDED: + sql = "DROP INDEX " + myIndexName; + break; + case POSTGRES_9_4: + case ORACLE_12C: + case MSSQL_2012: + sql = "ALTER TABLE " + getTableName() + " DROP CONSTRAINT " + myIndexName; + break; + } + } else { + // Drop index + switch (getDriverType()) { + case MYSQL_5_7: + case MARIADB_10_1: + sql = "ALTER TABLE " + getTableName() + " DROP INDEX " + myIndexName; + break; + case POSTGRES_9_4: + case DERBY_EMBEDDED: + case ORACLE_12C: + sql = "DROP INDEX " + myIndexName; + break; + case MSSQL_2012: + sql = "DROP INDEX " + getTableName() + "." + myIndexName; + break; + } } executeSql(sql); } - public DropIndexTask setTableName(String theTableName) { - myTableName = theTableName; - return this; - } - public DropIndexTask setIndexName(String theIndexName) { myIndexName = theIndexName; return this; diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java new file mode 100644 index 00000000000..12985ce51fb --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTask.java @@ -0,0 +1,69 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.migrate.JdbcUtils; +import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.SQLException; + +public class ModifyColumnTask extends BaseTableColumnTypeTask { + + private static final Logger ourLog = LoggerFactory.getLogger(ModifyColumnTask.class); + + + @Override + public void execute() { + + String existingType; + try { + existingType = JdbcUtils.getColumnType(getConnectionProperties(), getTableName(), getColumnName()); + } catch (SQLException e) { + throw new InternalErrorException(e); + } + + String wantedType = getColumnType().getDescriptor(getColumnLength()); + if (existingType.equals(wantedType)) { + ourLog.info("Column {} on table {} is already of type {} - No action performed", getColumnName(), getTableName(), wantedType); + return; + } + + String type = getSqlType(); + String notNull = getSqlNotNull(); + + String sql; + String sqlNotNull = null; + switch (getDriverType()) { + case DERBY_EMBEDDED: + sql = "alter table " + getTableName() + " alter column " + getColumnName() + " set data type " + type; + break; + case MARIADB_10_1: + case MYSQL_5_7: + sql = "alter table " + getTableName() + " modify column " + getColumnName() + " " + type + notNull; + break; + case POSTGRES_9_4: + sql = "alter table " + getTableName() + " alter column " + getColumnName() + " type " + type; + if (isNullable() == false) { + sqlNotNull = "alter table " + getTableName() + " alter column " + getColumnName() + " set not null"; + } + break; + case ORACLE_12C: + sql = "alter table " + getTableName() + " modify " + getColumnName() + " " + type + notNull; + break; + case MSSQL_2012: + sql = "alter table " + getTableName() + " alter column " + getColumnName() + " " + type + notNull; + break; + default: + throw new IllegalStateException("Dont know how to handle " + getDriverType()); + } + + ourLog.info("Updating column {} on table {} to type {}", getColumnName(), getTableName(), type); + executeSql(sql); + + if (sqlNotNull != null) { + ourLog.info("Updating 
column {} on table {} to not null", getColumnName(), getTableName()); + executeSql(sqlNotNull); + } + } + +} diff --git a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java index 150915e9336..7a94f697953 100644 --- a/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java +++ b/hapi-fhir-jpaserver-migrate/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java @@ -1,10 +1,8 @@ package ca.uhn.fhir.jpa.migrate.tasks; -import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamCoords; -import ca.uhn.fhir.jpa.migrate.taskdef.AddIndexTask; -import ca.uhn.fhir.jpa.migrate.taskdef.BaseTask; -import ca.uhn.fhir.jpa.migrate.taskdef.CalculateHashesTask; -import ca.uhn.fhir.jpa.migrate.taskdef.DropIndexTask; +import ca.uhn.fhir.jpa.dao.DaoConfig; +import ca.uhn.fhir.jpa.entity.*; +import ca.uhn.fhir.jpa.migrate.taskdef.*; import ca.uhn.fhir.util.VersionEnum; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; @@ -20,31 +18,240 @@ public class HapiFhirJpaMigrationTasks { public HapiFhirJpaMigrationTasks() { // Forced ID changes - forVersion(VersionEnum.V3_5_0) - .onTable("HFJ_FORCED_ID") + Builder.BuilderWithTableName forcedId = forVersion(VersionEnum.V3_5_0).onTable("HFJ_FORCED_ID"); + forcedId .dropIndex("IDX_FORCEDID_TYPE_FORCEDID"); - forVersion(VersionEnum.V3_5_0) - .onTable("HFJ_FORCED_ID") + forcedId .dropIndex("IDX_FORCEDID_TYPE_RESID"); - forVersion(VersionEnum.V3_5_0) - .onTable("HFJ_FORCED_ID") + forcedId .addIndex("IDX_FORCEDID_TYPE_FID") .unique(true) .withColumns("RESOURCE_TYPE", "FORCED_ID"); // Indexes - Coords - forVersion(VersionEnum.V3_5_0) - .onTable("HFJ_SPIDX_COORDS") + Builder.BuilderWithTableName spidxCoords = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_COORDS"); + spidxCoords .dropIndex("IDX_SP_COORDS_HASH"); - forVersion(VersionEnum.V3_5_0) - .onTable("HFJ_SPIDX_COORDS") + spidxCoords + .addColumn("HASH_IDENTITY") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxCoords .addIndex("IDX_SP_COORDS_HASH") .unique(false) .withColumns("HASH_IDENTITY", "SP_VALUE", "SP_LATITUDE", "SP_LONGITUDE"); - forVersion(VersionEnum.V3_5_0) - .addTask(new CalculateHashesTask().calculator(()->{ - return ResourceIndexedSearchParamCoords.calculateHashIdentity("resourceType", "paramName"); - })); + spidxCoords + .addTask(new CalculateHashesTask() + .setColumnName("HASH_IDENTITY") + .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME"))) + ); + + // Indexes - Date + Builder.BuilderWithTableName spidxDate = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_DATE"); + spidxDate + .dropIndex("IDX_SP_TOKEN"); + spidxDate + .addColumn("HASH_IDENTITY") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxDate + .addIndex("IDX_SP_DATE_HASH") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE_LOW", "SP_VALUE_HIGH"); + spidxDate + .addTask(new CalculateHashesTask() + .setColumnName("HASH_IDENTITY") + .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME"))) + ); + + // Indexes - Number + Builder.BuilderWithTableName spidxNumber = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_NUMBER"); + spidxNumber + .dropIndex("IDX_SP_NUMBER"); + spidxNumber + 
.addColumn("HASH_IDENTITY") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxNumber + .addIndex("IDX_SP_NUMBER_HASH_VAL") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE"); + spidxNumber + .addTask(new CalculateHashesTask() + .setColumnName("HASH_IDENTITY") + .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME"))) + ); + + // Indexes - Quantity + Builder.BuilderWithTableName spidxQuantity = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_QUANTITY"); + spidxQuantity + .dropIndex("IDX_SP_QUANTITY"); + spidxQuantity + .addColumn("HASH_IDENTITY") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxQuantity + .addColumn("HASH_IDENTITY_SYS_UNITS") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxQuantity + .addColumn("HASH_IDENTITY_AND_UNITS") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxQuantity + .addIndex("IDX_SP_QUANTITY_HASH") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_VALUE"); + spidxQuantity + .addIndex("IDX_SP_QUANTITY_HASH_UN") + .unique(false) + .withColumns("HASH_IDENTITY_AND_UNITS", "SP_VALUE"); + spidxQuantity + .addIndex("IDX_SP_QUANTITY_HASH_SYSUN") + .unique(false) + .withColumns("HASH_IDENTITY_SYS_UNITS", "SP_VALUE"); + spidxQuantity + .addTask(new CalculateHashesTask() + .setColumnName("HASH_IDENTITY") + .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME"))) + .addCalculator("HASH_IDENTITY_AND_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_UNITS"))) + .addCalculator("HASH_IDENTITY_SYS_UNITS", t -> ResourceIndexedSearchParamQuantity.calculateHashSystemAndUnits(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_SYSTEM"), t.getString("SP_UNITS"))) + ); + + // Indexes - String + Builder.BuilderWithTableName spidxString = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_STRING"); + spidxString + .dropIndex("IDX_SP_STRING"); + spidxString + .addColumn("HASH_NORM_PREFIX") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxString + .addColumn("HASH_NORM") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxString + .addIndex("IDX_SP_STRING_HASH_NRM") + .unique(false) + .withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED"); + spidxString + .addIndex("IDX_SP_STRING_HASH_EXCT") + .unique(false) + .withColumns("HASH_EXACT"); + spidxString + .addTask(new CalculateHashesTask() + .setColumnName("HASH_IDENTITY") + .addCalculator("IDX_SP_STRING_HASH_NRM", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new DaoConfig(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED"))) + .addCalculator("IDX_SP_STRING_HASH_EXCT", t -> ResourceIndexedSearchParamString.calculateHashExact(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT"))) + ); + + // Indexes - Token + Builder.BuilderWithTableName spidxToken = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_TOKEN"); + spidxToken + .dropIndex("IDX_SP_TOKEN"); + spidxToken + .dropIndex("IDX_SP_TOKEN_UNQUAL"); + spidxToken + .addColumn("HASH_IDENTITY") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxToken + .addColumn("HASH_SYS") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxToken + .addColumn("HASH_SYS_AND_VALUE") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxToken + 
.addColumn("HASH_VALUE") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxToken + .addIndex("IDX_SP_TOKEN_HASH") + .unique(false) + .withColumns("HASH_IDENTITY"); + spidxToken + .addIndex("IDX_SP_TOKEN_HASH_S") + .unique(false) + .withColumns("HASH_SYS"); + spidxToken + .addIndex("IDX_SP_TOKEN_HASH_SV") + .unique(false) + .withColumns("HASH_SYS_AND_VALUE"); + spidxToken + .addIndex("IDX_SP_TOKEN_HASH_V") + .unique(false) + .withColumns("HASH_VALUE"); + spidxToken + .addTask(new CalculateHashesTask() + .setColumnName("HASH_IDENTITY") + .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME"))) + .addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"))) + .addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE"))) + .addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE"))) + ); + + // Indexes - URI + Builder.BuilderWithTableName spidxUri = forVersion(VersionEnum.V3_5_0).onTable("HFJ_SPIDX_URI"); + spidxUri + .addColumn("HASH_IDENTITY") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spidxUri + .addIndex("IDX_SP_URI_HASH_IDENTITY") + .unique(false) + .withColumns("HASH_IDENTITY", "SP_URI"); + spidxUri + .addIndex("IDX_SP_URI_HASH_URI") + .unique(false) + .withColumns("HASH_URI"); + spidxUri + .addTask(new CalculateHashesTask() + .setColumnName("HASH_IDENTITY") + .addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME"))) + .addCalculator("HASH_URI", t -> ResourceIndexedSearchParamUri.calculateHashUri(t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_URI"))) + ); + + // Search Parameter Presence + Builder.BuilderWithTableName spp = forVersion(VersionEnum.V3_5_0).onTable("HFJ_RES_PARAM_PRESENT"); + spp.dropIndex("IDX_RESPARMPRESENT_SPID_RESID"); + spp + .addColumn("HASH_PRESENCE") + .nullable() + .type(AddColumnTask.ColumnTypeEnum.LONG); + spp + .addIndex("IDX_RESPARMPRESENT_HASHPRES") + .unique(false) + .withColumns("HASH_PRESENCE"); + ArbitrarySqlTask consolidateSearchParamPresenceIndexesTask = new ArbitrarySqlTask("Consolidate search parameter presence indexes"); + consolidateSearchParamPresenceIndexesTask.setBatchSize(1); + String sql = "SELECT " + + "HFJ_SEARCH_PARM.RES_TYPE RES_TYPE, HFJ_SEARCH_PARM.PARAM_NAME PARAM_NAME, " + + "HFJ_RES_PARAM_PRESENT.PID PID, HFJ_RES_PARAM_PRESENT.SP_ID SP_ID, HFJ_RES_PARAM_PRESENT.SP_PRESENT SP_PRESENT, HFJ_RES_PARAM_PRESENT.HASH_PRESENT HASH_PRESENT " + + "from HFJ_RES_PARAM_PRESENT " + + "join HFJ_SEARCH_PARM ON (HFJ_SEARCH_PARM.PID = HFJ_RES_PARAM_PRESENT.SP_ID) " + + "where HFJ_RES_PARAM_PRESENT.HASH_PRESENT is null"; + consolidateSearchParamPresenceIndexesTask.addQuery(sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> { + Long pid = (Long) t.get("PID"); + Boolean present = (Boolean) t.get("SP_PRESENT"); + String resType = (String) t.get("RES_TYPE"); + String paramName = (String) t.get("PARAM_NAME"); + Long hash = SearchParamPresent.calculateHashPresence(resType, paramName, present); + consolidateSearchParamPresenceIndexesTask.executeSql("update HFJ_RES_PARAM_PRESENT set HASH_PRESENCE = ? 
where PID = ?", hash, pid); + }); + forVersion(VersionEnum.V3_5_0).addTask(consolidateSearchParamPresenceIndexesTask); + + // Concept + Builder.BuilderWithTableName trmConcept = forVersion(VersionEnum.V3_5_0).onTable("TRM_CONCEPT"); + trmConcept + .addIndex("IDX_CONCEPT_UPDATED") + .unique(false) + .withColumns("CONCEPT_UPDATED"); + trmConcept + .modifyColumn("CODE") + .nonNullable() + .withType(BaseTableColumnTypeTask.ColumnTypeEnum.STRING, 500); } @@ -74,6 +281,7 @@ public class HapiFhirJpaMigrationTasks { private class BuilderWithTableName { private String myIndexName; + private String myColumnName; void dropIndex(String theIndexName) { DropIndexTask task = new DropIndexTask(); @@ -87,6 +295,21 @@ public class HapiFhirJpaMigrationTasks { return new BuilderAddIndexWithName(); } + public BuilderAddColumnWithName addColumn(String theColumnName) { + myColumnName = theColumnName; + return new BuilderAddColumnWithName(); + } + + public void addTask(BaseTableTask theTask) { + theTask.setTableName(myTableName); + Builder.this.addTask(theTask); + } + + public BuilderModifyColumnWithName modifyColumn(String theColumnName) { + myColumnName = theColumnName; + return new BuilderModifyColumnWithName(); + } + private class BuilderAddIndexWithName { private boolean myUnique; @@ -106,6 +329,57 @@ public class HapiFhirJpaMigrationTasks { } } } + + private class BuilderAddColumnWithName { + private boolean myNullable; + + public BuilderAddColumnWithNameNullable nullable() { + myNullable = true; + return new BuilderAddColumnWithNameNullable(); + } + + private class BuilderAddColumnWithNameNullable { + public void type(AddColumnTask.ColumnTypeEnum theColumnType) { + AddColumnTask task = new AddColumnTask(); + task.setColumnName(myColumnName); + task.setNullable(myNullable); + task.setColumnType(theColumnType); + addTask(task); + } + } + } + + private class BuilderModifyColumnWithName { + + private boolean myNullable; + + public BuilderModifyColumnWithNameAndNullable nullable() { + myNullable = true; + return new BuilderModifyColumnWithNameAndNullable(); + } + + public BuilderModifyColumnWithNameAndNullable nonNullable() { + myNullable = false; + return new BuilderModifyColumnWithNameAndNullable(); + } + + private class BuilderModifyColumnWithNameAndNullable { + + public void withType(BaseTableColumnTypeTask.ColumnTypeEnum theColumnType, int theLength) { + if (theColumnType == BaseTableColumnTypeTask.ColumnTypeEnum.STRING) { + ModifyColumnTask task = new ModifyColumnTask(); + task.setColumnName(myColumnName); + task.setTableName(myTableName); + task.setColumnLength(theLength); + task.setNullable(myNullable); + addTask(task); + } else { + throw new IllegalArgumentException("Can not specify length for column of type " + theColumnType); + } + } + + } + } } } diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTest.java new file mode 100644 index 00000000000..01bd0c173e2 --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/AddColumnTest.java @@ -0,0 +1,44 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.migrate.JdbcUtils; +import org.junit.Test; + +import java.sql.SQLException; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.junit.Assert.assertThat; + +public class AddColumnTest extends BaseTest { + + @Test + public void testColumnDoesntAlreadyExist() throws SQLException { + 
executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255))"); + + AddColumnTask task = new AddColumnTask(); + task.setTableName("SOMETABLE"); + task.setColumnName("newcol"); + task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG); + task.setNullable(true); + getMigrator().addTask(task); + + getMigrator().migrate(); + + assertThat(JdbcUtils.getColumnNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("PID", "TEXTCOL", "NEWCOL")); + } + + @Test + public void testColumnAlreadyExists() throws SQLException { + executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)"); + + AddColumnTask task = new AddColumnTask(); + task.setTableName("SOMETABLE"); + task.setColumnName("newcol"); + task.setColumnType(AddColumnTask.ColumnTypeEnum.LONG); + getMigrator().addTask(task); + + getMigrator().migrate(); + + assertThat(JdbcUtils.getColumnNames(getConnectionProperties(), "SOMETABLE"), containsInAnyOrder("PID", "TEXTCOL", "NEWCOL")); + } + +} diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTest.java new file mode 100644 index 00000000000..b75f073f94f --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ArbitrarySqlTest.java @@ -0,0 +1,53 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.entity.SearchParamPresent; +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +import static org.junit.Assert.assertEquals; + +public class ArbitrarySqlTest extends BaseTest { + + @Test + public void test350MigrateSearchParams() { + executeSql("create table HFJ_SEARCH_PARM (PID bigint not null, RES_TYPE varchar(255), PARAM_NAME varchar(255))"); + executeSql("insert into HFJ_SEARCH_PARM (PID, RES_TYPE, PARAM_NAME) values (1, 'Patient', 'identifier')"); + executeSql("insert into HFJ_SEARCH_PARM (PID, RES_TYPE, PARAM_NAME) values (2, 'Patient', 'family')"); + executeSql("create table HFJ_RES_PARAM_PRESENT (PID bigint, SP_ID bigint, SP_PRESENT boolean, HASH_PRESENT bigint)"); + executeSql("insert into HFJ_RES_PARAM_PRESENT (PID, SP_ID, SP_PRESENT, HASH_PRESENT) values (100, 1, true, null)"); + executeSql("insert into HFJ_RES_PARAM_PRESENT (PID, SP_ID, SP_PRESENT, HASH_PRESENT) values (101, 2, true, null)"); + + ArbitrarySqlTask task = new ArbitrarySqlTask("Consolidate search parameter presence indexes"); + task.setBatchSize(1); + String sql = "SELECT " + + "HFJ_SEARCH_PARM.RES_TYPE RES_TYPE, HFJ_SEARCH_PARM.PARAM_NAME PARAM_NAME, " + + "HFJ_RES_PARAM_PRESENT.PID PID, HFJ_RES_PARAM_PRESENT.SP_ID SP_ID, HFJ_RES_PARAM_PRESENT.SP_PRESENT SP_PRESENT, HFJ_RES_PARAM_PRESENT.HASH_PRESENT HASH_PRESENT " + + "from HFJ_RES_PARAM_PRESENT " + + "join HFJ_SEARCH_PARM ON (HFJ_SEARCH_PARM.PID = HFJ_RES_PARAM_PRESENT.SP_ID) " + + "where HFJ_RES_PARAM_PRESENT.HASH_PRESENT is null"; + task.addQuery(sql, ArbitrarySqlTask.QueryModeEnum.BATCH_UNTIL_NO_MORE, t -> { + Long pid = (Long) t.get("PID"); + Boolean present = (Boolean) t.get("SP_PRESENT"); + String resType = (String) t.get("RES_TYPE"); + String paramName = (String) t.get("PARAM_NAME"); + Long hash = SearchParamPresent.calculateHashPresence(resType, paramName, present); + task.executeSql("update HFJ_RES_PARAM_PRESENT set HASH_PRESENT = ? 
where PID = ?", hash, pid); + }); + + getMigrator().addTask(task); + getMigrator().migrate(); + + + List> rows = executeQuery("select * from HFJ_RES_PARAM_PRESENT order by PID asc"); + assertEquals(2, rows.size()); + assertEquals(100L, rows.get(0).get("PID")); + assertEquals(-1100208805056022671L, rows.get(0).get("HASH_PRESENT")); + assertEquals(101L, rows.get(1).get("PID")); + assertEquals(-756348509333838170L, rows.get(1).get("HASH_PRESENT")); + + } + + +} diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTest.java index 8f60d81373c..d113a4d1268 100644 --- a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTest.java +++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/BaseTest.java @@ -5,6 +5,10 @@ import ca.uhn.fhir.jpa.migrate.Migrator; import org.intellij.lang.annotations.Language; import org.junit.After; import org.junit.Before; +import org.springframework.jdbc.core.ColumnMapRowMapper; + +import java.util.List; +import java.util.Map; public class BaseTest { @@ -22,13 +26,19 @@ public class BaseTest { } - protected void executeSql(@Language("SQL") String theSql) { + protected void executeSql(@Language("SQL") String theSql, Object... theArgs) { myConnectionProperties.getTxTemplate().execute(t -> { - myConnectionProperties.newJdbcTemplate().execute(theSql); + myConnectionProperties.newJdbcTemplate().update(theSql, theArgs); return null; }); } + protected List> executeQuery(@Language("SQL") String theSql, Object... theArgs) { + return myConnectionProperties.getTxTemplate().execute(t -> { + return myConnectionProperties.newJdbcTemplate().query(theSql, theArgs, new ColumnMapRowMapper()); + }); + } + public Migrator getMigrator() { return myMigrator; } diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java new file mode 100644 index 00000000000..a5140a72b83 --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/CreateHashesTest.java @@ -0,0 +1,53 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam; +import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken; +import org.junit.Test; +import org.springframework.jdbc.core.JdbcTemplate; + +import java.util.Map; + +import static org.junit.Assert.assertEquals; + +public class CreateHashesTest extends BaseTest { + + @Test + public void testCreateHashes() { + executeSql("create table HFJ_SPIDX_TOKEN (SP_ID bigint not null, SP_MISSING boolean, SP_NAME varchar(100) not null, RES_ID bigint, RES_TYPE varchar(255) not null, SP_UPDATED timestamp, HASH_IDENTITY bigint, HASH_SYS bigint, HASH_SYS_AND_VALUE bigint, HASH_VALUE bigint, SP_SYSTEM varchar(200), SP_VALUE varchar(200), primary key (SP_ID))"); + executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '88888888', 1)"); + executeSql("insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (false, 'identifier', 999, 'Patient', '2018-09-03 07:44:49.196', 'urn:oid:1.2.410.100110.10.41308301', '99999999', 2)"); + + CalculateHashesTask task = new 
CalculateHashesTask(); + task.setTableName("HFJ_SPIDX_TOKEN"); + task.setColumnName("HASH_IDENTITY"); + task.addCalculator("HASH_IDENTITY", t -> BaseResourceIndexedSearchParam.calculateHashIdentity(t.getResourceType(), t.getString("SP_NAME"))); + task.addCalculator("HASH_SYS", t -> ResourceIndexedSearchParamToken.calculateHashSystem(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"))); + task.addCalculator("HASH_SYS_AND_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashSystemAndValue(t.getResourceType(), t.getParamName(), t.getString("SP_SYSTEM"), t.getString("SP_VALUE"))); + task.addCalculator("HASH_VALUE", t -> ResourceIndexedSearchParamToken.calculateHashValue(t.getResourceType(), t.getParamName(), t.getString("SP_VALUE"))); + task.setBatchSize(1); + getMigrator().addTask(task); + + getMigrator().migrate(); + + + getConnectionProperties().getTxTemplate().execute(t -> { + Map map; + JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate(); + + map = jdbcTemplate.queryForMap("select * from HFJ_SPIDX_TOKEN where SP_ID = 1"); + assertEquals(7001889285610424179L, map.get("HASH_IDENTITY")); + assertEquals(2686400398917843456L, map.get("HASH_SYS")); + assertEquals(-3943098850992523411L, map.get("HASH_SYS_AND_VALUE")); + assertEquals(845040519142030272L, map.get("HASH_VALUE")); + + map = jdbcTemplate.queryForMap("select * from HFJ_SPIDX_TOKEN where SP_ID = 2"); + assertEquals(7001889285610424179L, map.get("HASH_IDENTITY")); + assertEquals(2686400398917843456L, map.get("HASH_SYS")); + assertEquals(-6583685191951870327L, map.get("HASH_SYS_AND_VALUE")); + assertEquals(8271382783311609619L, map.get("HASH_VALUE")); + + return null; + }); + } + +} diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTest.java new file mode 100644 index 00000000000..f0c540af1c4 --- /dev/null +++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/taskdef/ModifyColumnTest.java @@ -0,0 +1,32 @@ +package ca.uhn.fhir.jpa.migrate.taskdef; + +import ca.uhn.fhir.jpa.migrate.JdbcUtils; +import org.junit.Test; + +import java.sql.SQLException; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThat; + +public class ModifyColumnTest extends BaseTest { + + + @Test + public void testColumnAlreadyExists() throws SQLException { + executeSql("create table SOMETABLE (PID bigint not null, TEXTCOL varchar(255), newcol bigint)"); + + ModifyColumnTask task = new ModifyColumnTask(); + task.setTableName("SOMETABLE"); + task.setColumnName("TEXTCOL"); + task.setColumnType(AddColumnTask.ColumnTypeEnum.STRING); + task.setNullable(true); + task.setColumnLength(300); + getMigrator().addTask(task); + + getMigrator().migrate(); + + assertEquals("varchar(300)", JdbcUtils.getColumnType(getConnectionProperties(), "SOMETABLE", "TEXTCOL")); + } + +} diff --git a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasksTest.java b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasksTest.java index bebbe38ca5e..b2e50ab0c08 100644 --- a/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasksTest.java +++ b/hapi-fhir-jpaserver-migrate/src/test/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasksTest.java @@ -9,5 +9,4 @@ public class 
HapiFhirJpaMigrationTasksTest { new HapiFhirJpaMigrationTasks(); } - } diff --git a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/DefaultProfileValidationSupport.java b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/DefaultProfileValidationSupport.java index 2060416b8bc..6665bd2d8b6 100644 --- a/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/DefaultProfileValidationSupport.java +++ b/hapi-fhir-structures-r4/src/main/java/org/hl7/fhir/r4/hapi/ctx/DefaultProfileValidationSupport.java @@ -1,8 +1,8 @@ package org.hl7.fhir.r4.hapi.ctx; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.rest.api.Constants; import org.apache.commons.io.Charsets; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -15,6 +15,7 @@ import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent; import org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionComponent; import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity; +import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.*; @@ -169,7 +170,7 @@ public class DefaultProfileValidationSupport implements IValidationSupport { InputStreamReader reader = null; if (inputStream != null) { try { - reader = new InputStreamReader(inputStream, Charsets.UTF_8); + reader = new InputStreamReader(inputStream, Constants.CHARSET_UTF8); Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader); for (BundleEntryComponent next : bundle.getEntry()) { @@ -190,8 +191,14 @@ public class DefaultProfileValidationSupport implements IValidationSupport { } } } finally { - IOUtils.closeQuietly(reader); - IOUtils.closeQuietly(inputStream); + try { + if (reader != null) { + reader.close(); + } + inputStream.close(); + } catch (IOException e) { + ourLog.warn("Failure closing stream", e); + } } } else { ourLog.warn("Unable to load resource: {}", theClasspath); @@ -202,7 +209,7 @@ public class DefaultProfileValidationSupport implements IValidationSupport { ourLog.info("Loading structure definitions from classpath: {}", theClasspath); InputStream valuesetText = DefaultProfileValidationSupport.class.getResourceAsStream(theClasspath); if (valuesetText != null) { - InputStreamReader reader = new InputStreamReader(valuesetText, Charsets.UTF_8); + InputStreamReader reader = new InputStreamReader(valuesetText, Constants.CHARSET_UTF8); Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader); for (BundleEntryComponent next : bundle.getEntry()) {