allow `DruidSchema` to fall back to segment metadata 'type' if 'typeSignature' is null (#12016)

* allow `DruidSchema` to fall back to the segment metadata `type` field if `typeSignature` is null, to avoid producing an incorrect SQL schema when a broker is upgraded to 0.23 before the historicals
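
The shape of the fix, as a minimal standalone sketch (not part of the commit; the class and helper names here are hypothetical, but the `ColumnAnalysis`/`ColumnType` calls are exactly the ones the diff below relies on):

import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
import org.apache.druid.segment.column.ColumnType;

public class ColumnTypeFallbackSketch
{
  // Prefer the 'typeSignature' reported by 0.23+ servers; otherwise try to
  // parse the legacy 'type' string, and only then give up and call it COMPLEX.
  static ColumnType resolveColumnType(final ColumnAnalysis column)
  {
    ColumnType valueType = column.getTypeSignature();
    if (valueType == null) {
      try {
        // legacy field, the only type information populated by pre-0.23 servers
        valueType = ColumnType.fromString(column.getType());
      }
      catch (IllegalArgumentException ignored) {
        // the legacy string is not a parseable type signature
        valueType = ColumnType.UNKNOWN_COMPLEX;
      }
    }
    return valueType;
  }
}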

* mmm, forbidden tests
Clint Wylie, 2021-12-02 17:42:01 -08:00, committed by GitHub
parent 84b4bf56d8
commit af6541a236
2 changed files with 113 additions and 3 deletions

File: DruidSchema.java

@@ -875,7 +875,8 @@ public class DruidSchema extends AbstractSchema
         .runSimple(segmentMetadataQuery, escalator.createEscalatedAuthenticationResult(), Access.OK);
   }
 
-  private static RowSignature analysisToRowSignature(final SegmentAnalysis analysis)
+  @VisibleForTesting
+  static RowSignature analysisToRowSignature(final SegmentAnalysis analysis)
   {
     final RowSignature.Builder rowSignatureBuilder = RowSignature.builder();
     for (Map.Entry<String, ColumnAnalysis> entry : analysis.getColumns().entrySet()) {
@@ -886,9 +887,18 @@ public class DruidSchema extends AbstractSchema
       ColumnType valueType = entry.getValue().getTypeSignature();
 
-      // this shouldn't happen, but if it does assume types are some flavor of COMPLEX.
+      // this shouldn't happen, but if it does, first try to fall back to the legacy type information field in case
+      // the standard upgrade order was not followed for 0.22 to 0.23+, and if that also fails, then assume types are
+      // some flavor of COMPLEX.
       if (valueType == null) {
-        valueType = ColumnType.UNKNOWN_COMPLEX;
+        // at some point in the future this can be simplified to the contents of the catch clause here, once the
+        // likelihood of upgrading from some version lower than 0.23 is low
+        try {
+          valueType = ColumnType.fromString(entry.getValue().getType());
+        }
+        catch (IllegalArgumentException ignored) {
+          valueType = ColumnType.UNKNOWN_COMPLEX;
+        }
       }
 
       rowSignatureBuilder.add(entry.getKey(), valueType);
     }
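
Why a try/catch rather than a null check: `ColumnType.fromString` can throw `IllegalArgumentException` on strings that are not valid type signatures, which is exactly what the catch clause above handles. A small demonstration; the successful round-trips mirror what testSegmentMetadataFallbackType below exercises, while the unparseable example string is an assumption:

import org.apache.druid.segment.column.ColumnType;

public class FromStringDemo
{
  public static void main(String[] args)
  {
    // round-trips of the legacy 'type' strings the new tests feed in
    System.out.println(ColumnType.fromString(ColumnType.STRING.asTypeString())); // STRING
    System.out.println(ColumnType.fromString(ColumnType.LONG.asTypeString()));   // LONG

    try {
      // assumption: this string is not a valid type signature, so fromString throws
      ColumnType.fromString("definitely-not-a-type");
    }
    catch (IllegalArgumentException e) {
      // the path the new code maps to ColumnType.UNKNOWN_COMPLEX
      System.out.println(ColumnType.UNKNOWN_COMPLEX);
    }
  }
}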

File: DruidSchemaTest.java

@@ -40,11 +40,15 @@ import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
 import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
 import org.apache.druid.query.metadata.metadata.AllColumnIncluderator;
+import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
+import org.apache.druid.query.metadata.metadata.SegmentAnalysis;
 import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery;
 import org.apache.druid.query.spec.MultipleSpecificSegmentSpec;
 import org.apache.druid.segment.IndexBuilder;
 import org.apache.druid.segment.QueryableIndex;
 import org.apache.druid.segment.TestHelper;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
 import org.apache.druid.segment.incremental.IncrementalIndexSchema;
 import org.apache.druid.segment.join.MapJoinableFactory;
 import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
@@ -1087,6 +1091,102 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
   }
 
+  @Test
+  public void testSegmentMetadataColumnType()
+  {
+    RowSignature signature = DruidSchema.analysisToRowSignature(
+        new SegmentAnalysis(
+            "id",
+            ImmutableList.of(Intervals.utc(1L, 2L)),
+            ImmutableMap.of(
+                "a",
+                new ColumnAnalysis(
+                    ColumnType.STRING,
+                    ColumnType.STRING.asTypeString(),
+                    false,
+                    true,
+                    1234,
+                    26,
+                    "a",
+                    "z",
+                    null
+                ),
+                "count",
+                new ColumnAnalysis(
+                    ColumnType.LONG,
+                    ColumnType.LONG.asTypeString(),
+                    false,
+                    true,
+                    1234,
+                    26,
+                    "a",
+                    "z",
+                    null
+                )
+            ),
+            1234,
+            100,
+            null,
+            null,
+            null,
+            null
+        )
+    );
+
+    Assert.assertEquals(
+        RowSignature.builder().add("a", ColumnType.STRING).add("count", ColumnType.LONG).build(),
+        signature
+    );
+  }
+
+  @Test
+  public void testSegmentMetadataFallbackType()
+  {
+    RowSignature signature = DruidSchema.analysisToRowSignature(
+        new SegmentAnalysis(
+            "id",
+            ImmutableList.of(Intervals.utc(1L, 2L)),
+            ImmutableMap.of(
+                "a",
+                new ColumnAnalysis(
+                    null,
+                    ColumnType.STRING.asTypeString(),
+                    false,
+                    true,
+                    1234,
+                    26,
+                    "a",
+                    "z",
+                    null
+                ),
+                "count",
+                new ColumnAnalysis(
+                    null,
+                    ColumnType.LONG.asTypeString(),
+                    false,
+                    true,
+                    1234,
+                    26,
+                    "a",
+                    "z",
+                    null
+                )
+            ),
+            1234,
+            100,
+            null,
+            null,
+            null,
+            null
+        )
+    );
+
+    Assert.assertEquals(
+        RowSignature.builder().add("a", ColumnType.STRING).add("count", ColumnType.LONG).build(),
+        signature
+    );
+  }
+
   private static DataSegment newSegment(String datasource, int partitionId)
   {
     return new DataSegment(