diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java
index 5db9bfa4689..42fc8526d84 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java
@@ -875,7 +875,8 @@ public class DruidSchema extends AbstractSchema
         .runSimple(segmentMetadataQuery, escalator.createEscalatedAuthenticationResult(), Access.OK);
   }
 
-  private static RowSignature analysisToRowSignature(final SegmentAnalysis analysis)
+  @VisibleForTesting
+  static RowSignature analysisToRowSignature(final SegmentAnalysis analysis)
   {
     final RowSignature.Builder rowSignatureBuilder = RowSignature.builder();
     for (Map.Entry<String, ColumnAnalysis> entry : analysis.getColumns().entrySet()) {
@@ -886,9 +887,18 @@ public class DruidSchema extends AbstractSchema
 
       ColumnType valueType = entry.getValue().getTypeSignature();
 
-      // this shouldn't happen, but if it does assume types are some flavor of COMPLEX.
+      // this shouldn't happen, but if it does, first try to fall back to the legacy type information field in case
+      // standard upgrade order was not followed for 0.22 to 0.23+, and if that also fails, then assume types are some
+      // flavor of COMPLEX.
       if (valueType == null) {
-        valueType = ColumnType.UNKNOWN_COMPLEX;
+        // at some point in the future this can be simplified to the contents of the catch clause here, once the
+        // likelihood of upgrading from some version lower than 0.23 is low
+        try {
+          valueType = ColumnType.fromString(entry.getValue().getType());
+        }
+        catch (IllegalArgumentException ignored) {
+          valueType = ColumnType.UNKNOWN_COMPLEX;
+        }
       }
 
       rowSignatureBuilder.add(entry.getKey(), valueType);
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
index 2bfd3ca0038..0d0ba2f6f95 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
@@ -40,11 +40,15 @@ import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
 import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
 import org.apache.druid.query.metadata.metadata.AllColumnIncluderator;
+import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
+import org.apache.druid.query.metadata.metadata.SegmentAnalysis;
 import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery;
 import org.apache.druid.query.spec.MultipleSpecificSegmentSpec;
 import org.apache.druid.segment.IndexBuilder;
 import org.apache.druid.segment.QueryableIndex;
 import org.apache.druid.segment.TestHelper;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
 import org.apache.druid.segment.incremental.IncrementalIndexSchema;
 import org.apache.druid.segment.join.MapJoinableFactory;
 import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
@@ -1087,6 +1091,102 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
 
   }
 
+  @Test
+  public void testSegmentMetadataColumnType()
+  {
+    RowSignature signature = DruidSchema.analysisToRowSignature(
+        new SegmentAnalysis(
+            "id",
+            ImmutableList.of(Intervals.utc(1L, 2L)),
+            ImmutableMap.of(
+                "a",
+                new ColumnAnalysis(
+                    ColumnType.STRING,
+                    ColumnType.STRING.asTypeString(),
+                    false,
+                    true,
+                    1234,
+                    26,
+                    "a",
+                    "z",
+                    null
+                ),
+                "count",
+                new ColumnAnalysis(
+                    ColumnType.LONG,
+                    ColumnType.LONG.asTypeString(),
+                    false,
+                    true,
+                    1234,
+                    26,
+                    "a",
+                    "z",
+                    null
+                )
+            ),
+            1234,
+            100,
+            null,
+            null,
+            null,
+            null
+        )
+    );
+
+    Assert.assertEquals(
+        RowSignature.builder().add("a", ColumnType.STRING).add("count", ColumnType.LONG).build(),
+        signature
+    );
+  }
+
+
+  @Test
+  public void testSegmentMetadataFallbackType()
+  {
+    RowSignature signature = DruidSchema.analysisToRowSignature(
+        new SegmentAnalysis(
+            "id",
+            ImmutableList.of(Intervals.utc(1L, 2L)),
+            ImmutableMap.of(
+                "a",
+                new ColumnAnalysis(
+                    null,
+                    ColumnType.STRING.asTypeString(),
+                    false,
+                    true,
+                    1234,
+                    26,
+                    "a",
+                    "z",
+                    null
+                ),
+                "count",
+                new ColumnAnalysis(
+                    null,
+                    ColumnType.LONG.asTypeString(),
+                    false,
+                    true,
+                    1234,
+                    26,
+                    "a",
+                    "z",
+                    null
+                )
+            ),
+            1234,
+            100,
+            null,
+            null,
+            null,
+            null
+        )
+    );
+    Assert.assertEquals(
+        RowSignature.builder().add("a", ColumnType.STRING).add("count", ColumnType.LONG).build(),
+        signature
+    );
+  }
+
   private static DataSegment newSegment(String datasource, int partitionId)
   {
     return new DataSegment(
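
Note (not part of the patch): the null-handling path added to analysisToRowSignature above amounts to a small per-column type-resolution step. The sketch below restates it as a standalone helper; the class and method names (ColumnTypeFallbackSketch, resolveColumnType) are hypothetical and do not exist in the Druid codebase, while ColumnAnalysis, ColumnType.fromString, and ColumnType.UNKNOWN_COMPLEX are the same APIs the diff itself uses.

// Standalone sketch of the fallback introduced above; ColumnTypeFallbackSketch and
// resolveColumnType are illustrative names only.
import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
import org.apache.druid.segment.column.ColumnType;

public class ColumnTypeFallbackSketch
{
  static ColumnType resolveColumnType(final ColumnAnalysis column)
  {
    // 0.23+ data nodes report a typed signature; prefer it when present.
    final ColumnType typeSignature = column.getTypeSignature();
    if (typeSignature != null) {
      return typeSignature;
    }
    try {
      // Older (pre-0.23) nodes only report the legacy string type name, so try to parse that.
      return ColumnType.fromString(column.getType());
    }
    catch (IllegalArgumentException ignored) {
      // The legacy string could not be parsed either; treat the column as unknown COMPLEX.
      return ColumnType.UNKNOWN_COMPLEX;
    }
  }
}

The two new tests exercise exactly these branches: testSegmentMetadataColumnType covers the typed-signature path, and testSegmentMetadataFallbackType covers the legacy-string fallback where the type signature is null.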