fix bugs with auto encoded long vector deserializers (#14186)

This PR fixes an issue when using 'auto' encoded LONG typed columns and the 'vectorized' query engine. These columns use a delta based bit-packing mechanism, and errors in the vectorized reader would cause it to incorrectly read column values for some bit sizes (1 through 32 bits). This is a regression caused by #11004, which added the optimized readers to improve performance, and it therefore impacts Druid versions 0.22.0 and later.

While writing the test, I finally got sad enough about IndexSpec not having a "builder" that I made one, and switched all the things to use it. Apologies for the noise in this bug-fix PR: the only real changes are in VSizeLongSerde, and in the tests that have been modified to cover the buggy behavior, VSizeLongSerdeTest and ExpressionVectorSelectorsTest. Everything else is just cleanup of IndexSpec usage.
This commit is contained in:
Clint Wylie 2023-04-30 23:19:27 -07:00 committed by GitHub
parent 32af570fb2
commit 90ea192d9c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
94 changed files with 818 additions and 630 deletions

View File

@ -179,7 +179,7 @@ public class FilterPartitionBenchmark
indexFile = INDEX_MERGER_V9.persist(
incIndex,
tmpDir,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
qIndex = INDEX_IO.loadIndex(indexFile);

View File

@ -282,7 +282,7 @@ public class FilteredAggregatorBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndex,
qIndexesDir,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
incIndex.close();

View File

@ -324,7 +324,7 @@ public class GroupByTypeInterfaceBenchmark
final File file = INDEX_MERGER_V9.persist(
index,
new File(tmpDir, String.valueOf(i)),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);

View File

@ -287,7 +287,7 @@ public class TopNTypeInterfaceBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndexes.get(i),
tmpFile,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);

View File

@ -141,7 +141,7 @@ public class IndexMergeBenchmark
File indexFile = indexMergerV9.persist(
incIndex,
tmpDir,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
@ -166,7 +166,7 @@ public class IndexMergeBenchmark
rollup,
schemaInfo.getAggsArray(),
tmpFile,
new IndexSpec(),
IndexSpec.DEFAULT,
null,
-1
);

View File

@ -189,7 +189,7 @@ public class IndexPersistBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndex,
tmpDir,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);

View File

@ -601,7 +601,7 @@ public class GroupByBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndex,
new File(qIndexesDir, String.valueOf(i)),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
incIndex.close();

View File

@ -340,7 +340,7 @@ public class ScanBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndex,
new File(qIndexesDir, String.valueOf(i)),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
incIndex.close();

View File

@ -407,7 +407,7 @@ public class SearchBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndex,
new File(qIndexesDir, String.valueOf(i)),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
incIndex.close();

View File

@ -457,15 +457,7 @@ public class SqlBenchmark
final QueryableIndex index = segmentGenerator.generate(
dataSegment,
schemaInfo,
new IndexSpec(
null,
null,
encodingStrategy,
null,
null,
null,
null
),
IndexSpec.builder().withStringDictionaryEncoding(encodingStrategy).build(),
Granularities.NONE,
rowsPerSegment
);

View File

@ -322,15 +322,7 @@ public class SqlNestedDataBenchmark
schemaInfo,
dimsSpec,
transformSpec,
new IndexSpec(
null,
null,
encodingStrategy,
null,
null,
null,
null
),
IndexSpec.builder().withStringDictionaryEncoding(encodingStrategy).build(),
Granularities.NONE,
rowsPerSegment
);

View File

@ -332,7 +332,7 @@ public class TimeseriesBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndex,
new File(qIndexesDir, String.valueOf(i)),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
incIndex.close();

View File

@ -308,7 +308,7 @@ public class TopNBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndex,
new File(qIndexesDir, String.valueOf(i)),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
incIndex.close();

View File

@ -336,7 +336,7 @@ public class TimeCompareBenchmark
File indexFile = INDEX_MERGER_V9.persist(
incIndexes.get(i),
tmpDir,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);

View File

@ -51,7 +51,7 @@ import java.io.File;
public class K8sTestUtils
{
private static final IndexSpec INDEX_SPEC = new IndexSpec();
private static final IndexSpec INDEX_SPEC = IndexSpec.DEFAULT;
/*

View File

@ -68,8 +68,8 @@ public class KafkaIndexTaskTuningConfigTest
Assert.assertNull(config.getMaxTotalRows());
Assert.assertEquals(new Period("PT10M"), config.getIntermediatePersistPeriod());
Assert.assertEquals(0, config.getMaxPendingPersists());
Assert.assertEquals(new IndexSpec(), config.getIndexSpec());
Assert.assertEquals(new IndexSpec(), config.getIndexSpecForIntermediatePersists());
Assert.assertEquals(IndexSpec.DEFAULT, config.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, config.getIndexSpecForIntermediatePersists());
Assert.assertEquals(false, config.isReportParseExceptions());
Assert.assertEquals(0, config.getHandoffConditionTimeout());
}
@ -112,8 +112,14 @@ public class KafkaIndexTaskTuningConfigTest
Assert.assertEquals(100, config.getMaxPendingPersists());
Assert.assertEquals(true, config.isReportParseExceptions());
Assert.assertEquals(100, config.getHandoffConditionTimeout());
Assert.assertEquals(new IndexSpec(null, null, CompressionStrategy.NONE, null), config.getIndexSpec());
Assert.assertEquals(new IndexSpec(null, CompressionStrategy.UNCOMPRESSED, null, null), config.getIndexSpecForIntermediatePersists());
Assert.assertEquals(
IndexSpec.builder().withMetricCompression(CompressionStrategy.NONE).build(),
config.getIndexSpec()
);
Assert.assertEquals(
IndexSpec.builder().withDimensionCompression(CompressionStrategy.UNCOMPRESSED).build(),
config.getIndexSpecForIntermediatePersists()
);
}
@Test
@ -128,8 +134,8 @@ public class KafkaIndexTaskTuningConfigTest
10L,
new Period("PT3S"),
4,
new IndexSpec(),
new IndexSpec(),
IndexSpec.DEFAULT,
IndexSpec.DEFAULT,
true,
5L,
null,
@ -156,7 +162,7 @@ public class KafkaIndexTaskTuningConfigTest
Assert.assertEquals(new Period("PT3S"), copy.getIntermediatePersistPeriod());
Assert.assertNull(copy.getBasePersistDirectory());
Assert.assertEquals(4, copy.getMaxPendingPersists());
Assert.assertEquals(new IndexSpec(), copy.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, copy.getIndexSpec());
Assert.assertEquals(true, copy.isReportParseExceptions());
Assert.assertEquals(5L, copy.getHandoffConditionTimeout());
}
@ -174,8 +180,8 @@ public class KafkaIndexTaskTuningConfigTest
new Period("PT3S"),
new File("/tmp/xxx"),
4,
new IndexSpec(),
new IndexSpec(),
IndexSpec.DEFAULT,
IndexSpec.DEFAULT,
true,
5L,
null,
@ -222,8 +228,8 @@ public class KafkaIndexTaskTuningConfigTest
10L,
new Period("PT3S"),
4,
new IndexSpec(),
new IndexSpec(),
IndexSpec.DEFAULT,
IndexSpec.DEFAULT,
true,
5L,
null,
@ -262,7 +268,12 @@ public class KafkaIndexTaskTuningConfigTest
public void testEqualsAndHashCode()
{
EqualsVerifier.forClass(KafkaIndexTaskTuningConfig.class)
.usingGetClass()
.verify();
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder().withDimensionCompression(CompressionStrategy.ZSTD).build()
)
.usingGetClass()
.verify();
}
}

View File

@ -63,8 +63,8 @@ public class KafkaSupervisorTuningConfigTest
Assert.assertEquals(5_000_000, config.getMaxRowsPerSegment().intValue());
Assert.assertEquals(new Period("PT10M"), config.getIntermediatePersistPeriod());
Assert.assertEquals(0, config.getMaxPendingPersists());
Assert.assertEquals(new IndexSpec(), config.getIndexSpec());
Assert.assertEquals(new IndexSpec(), config.getIndexSpecForIntermediatePersists());
Assert.assertEquals(IndexSpec.DEFAULT, config.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, config.getIndexSpecForIntermediatePersists());
Assert.assertEquals(false, config.isReportParseExceptions());
Assert.assertEquals(0, config.getHandoffConditionTimeout());
Assert.assertNull(config.getWorkerThreads());
@ -122,8 +122,14 @@ public class KafkaSupervisorTuningConfigTest
Assert.assertEquals(Duration.standardSeconds(15), config.getHttpTimeout());
Assert.assertEquals(Duration.standardSeconds(95), config.getShutdownTimeout());
Assert.assertEquals(Duration.standardSeconds(20), config.getOffsetFetchPeriod());
Assert.assertEquals(new IndexSpec(null, null, CompressionStrategy.NONE, null), config.getIndexSpec());
Assert.assertEquals(new IndexSpec(null, CompressionStrategy.UNCOMPRESSED, null, null), config.getIndexSpecForIntermediatePersists());
Assert.assertEquals(
IndexSpec.builder().withMetricCompression(CompressionStrategy.NONE).build(),
config.getIndexSpec()
);
Assert.assertEquals(
IndexSpec.builder().withDimensionCompression(CompressionStrategy.UNCOMPRESSED).build(),
config.getIndexSpecForIntermediatePersists()
);
}
}

View File

@ -27,6 +27,7 @@ import org.apache.druid.indexing.kinesis.supervisor.KinesisSupervisorTuningConfi
import org.apache.druid.indexing.kinesis.test.TestModifiedKinesisIndexTaskTuningConfig;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;
import org.apache.druid.segment.indexing.TuningConfig;
import org.hamcrest.CoreMatchers;
@ -73,7 +74,7 @@ public class KinesisIndexTaskTuningConfigTest
Assert.assertEquals(5_000_000, config.getMaxRowsPerSegment().intValue());
Assert.assertEquals(new Period("PT10M"), config.getIntermediatePersistPeriod());
Assert.assertEquals(0, config.getMaxPendingPersists());
Assert.assertEquals(new IndexSpec(), config.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, config.getIndexSpec());
Assert.assertFalse(config.isReportParseExceptions());
Assert.assertEquals(0, config.getHandoffConditionTimeout());
Assert.assertNull(config.getRecordBufferSizeConfigured());
@ -146,8 +147,8 @@ public class KinesisIndexTaskTuningConfigTest
new Period("PT3S"),
new File("/tmp/xxx"),
4,
new IndexSpec(),
new IndexSpec(),
IndexSpec.DEFAULT,
IndexSpec.DEFAULT,
true,
5L,
true,
@ -205,8 +206,8 @@ public class KinesisIndexTaskTuningConfigTest
new Period("PT3S"),
new File("/tmp/xxx"),
4,
new IndexSpec(),
new IndexSpec(),
IndexSpec.DEFAULT,
IndexSpec.DEFAULT,
true,
5L,
true,
@ -289,8 +290,8 @@ public class KinesisIndexTaskTuningConfigTest
100L,
new Period("PT3S"),
4,
new IndexSpec(),
new IndexSpec(),
IndexSpec.DEFAULT,
IndexSpec.DEFAULT,
true,
5L,
true,
@ -325,7 +326,7 @@ public class KinesisIndexTaskTuningConfigTest
Assert.assertEquals(new Period("PT3S"), copy.getIntermediatePersistPeriod());
Assert.assertNull(copy.getBasePersistDirectory());
Assert.assertEquals(4, copy.getMaxPendingPersists());
Assert.assertEquals(new IndexSpec(), copy.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, copy.getIndexSpec());
Assert.assertTrue(copy.isReportParseExceptions());
Assert.assertEquals(5L, copy.getHandoffConditionTimeout());
Assert.assertEquals(1000, (int) copy.getRecordBufferSizeConfigured());
@ -342,7 +343,12 @@ public class KinesisIndexTaskTuningConfigTest
public void testEqualsAndHashCode()
{
EqualsVerifier.forClass(KinesisIndexTaskTuningConfig.class)
.usingGetClass()
.verify();
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder().withDimensionCompression(CompressionStrategy.ZSTD).build()
)
.usingGetClass()
.verify();
}
}

View File

@ -62,7 +62,7 @@ public class KinesisSupervisorTuningConfigTest
Assert.assertEquals(5_000_000, config.getMaxRowsPerSegment().intValue());
Assert.assertEquals(new Period("PT10M"), config.getIntermediatePersistPeriod());
Assert.assertEquals(0, config.getMaxPendingPersists());
Assert.assertEquals(new IndexSpec(), config.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, config.getIndexSpec());
Assert.assertEquals(false, config.isReportParseExceptions());
Assert.assertEquals(0, config.getHandoffConditionTimeout());
Assert.assertNull(config.getWorkerThreads());

View File

@ -122,7 +122,7 @@ public class MSQTuningConfig
public IndexSpec getIndexSpec()
{
return indexSpec != null ? indexSpec : new IndexSpec();
return indexSpec != null ? indexSpec : IndexSpec.DEFAULT;
}
@Override

View File

@ -302,13 +302,13 @@ public class SegmentGeneratorFrameProcessorFactory
public IndexSpec getIndexSpecForIntermediatePersists()
{
// Disable compression for intermediate persists to reduce direct memory usage.
return new IndexSpec(
null,
CompressionStrategy.UNCOMPRESSED, // Dimensions don't support NONE, so use UNCOMPRESSED
CompressionStrategy.NONE, // NONE is more efficient than UNCOMPRESSED
CompressionFactory.LongEncodingStrategy.LONGS,
null
);
return IndexSpec.builder()
// Dimensions don't support NONE, so use UNCOMPRESSED
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
// NONE is more efficient than UNCOMPRESSED
.withMetricCompression(CompressionStrategy.NONE)
.withLongEncoding(CompressionFactory.LongEncodingStrategy.LONGS)
.build();
}
@Override

View File

@ -20,6 +20,8 @@
package org.apache.druid.msq.indexing;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.CompressionStrategy;
import org.junit.Test;
public class MSQSpecTest
@ -30,6 +32,11 @@ public class MSQSpecTest
{
EqualsVerifier.forClass(MSQSpec.class)
.withNonnullFields("query", "destination", "tuningConfig")
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder().withDimensionCompression(CompressionStrategy.ZSTD).build()
)
.usingGetClass()
.verify();
}

View File

@ -24,6 +24,7 @@ import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.column.StringEncodingStrategy;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.data.FrontCodedIndexed;
import org.junit.Assert;
import org.junit.Test;
@ -52,15 +53,11 @@ public class MSQTuningConfigTest
2,
3,
4,
new IndexSpec(
null,
null,
new StringEncodingStrategy.FrontCoded(null, FrontCodedIndexed.V1),
null,
null,
null,
null
)
IndexSpec.builder()
.withStringDictionaryEncoding(
new StringEncodingStrategy.FrontCoded(null, FrontCodedIndexed.V1)
)
.build()
);
Assert.assertEquals(config, mapper.readValue(mapper.writeValueAsString(config), MSQTuningConfig.class));
@ -69,6 +66,13 @@ public class MSQTuningConfigTest
@Test
public void testEquals()
{
EqualsVerifier.forClass(MSQTuningConfig.class).usingGetClass().verify();
EqualsVerifier.forClass(MSQTuningConfig.class)
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder().withDimensionCompression(CompressionStrategy.ZSTD).build()
)
.usingGetClass()
.verify();
}
}

View File

@ -204,16 +204,20 @@ public class MultiStageQueryContextTest
{
Assert.assertNull(decodeIndexSpec(null));
Assert.assertEquals(new IndexSpec(), decodeIndexSpec("{}"));
Assert.assertEquals(new IndexSpec(), decodeIndexSpec(Collections.emptyMap()));
Assert.assertEquals(IndexSpec.DEFAULT, decodeIndexSpec("{}"));
Assert.assertEquals(IndexSpec.DEFAULT, decodeIndexSpec(Collections.emptyMap()));
Assert.assertEquals(
new IndexSpec(null, null, new StringEncodingStrategy.FrontCoded(null, null), null, null, null, null),
IndexSpec.builder()
.withStringDictionaryEncoding(new StringEncodingStrategy.FrontCoded(null, null))
.build(),
decodeIndexSpec("{\"stringDictionaryEncoding\":{\"type\":\"frontCoded\"}}")
);
Assert.assertEquals(
new IndexSpec(null, null, new StringEncodingStrategy.FrontCoded(null), null, null, null, null),
IndexSpec.builder()
.withStringDictionaryEncoding(new StringEncodingStrategy.FrontCoded(null))
.build(),
decodeIndexSpec(ImmutableMap.of("stringDictionaryEncoding", ImmutableMap.of("type", "frontCoded")))
);

View File

@ -43,7 +43,7 @@ public class HadoopTuningConfig implements TuningConfig
{
private static final DimensionBasedPartitionsSpec DEFAULT_PARTITIONS_SPEC = HashedPartitionsSpec.defaultSpec();
private static final Map<Long, List<HadoopyShardSpec>> DEFAULT_SHARD_SPECS = ImmutableMap.of();
private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec();
private static final IndexSpec DEFAULT_INDEX_SPEC = IndexSpec.DEFAULT;
private static final boolean DEFAULT_USE_COMBINER = false;
private static final int DEFAULT_NUM_BACKGROUND_PERSIST_THREADS = 0;

View File

@ -73,8 +73,8 @@ public class HadoopTuningConfigTest
Assert.assertEquals(new OnheapIncrementalIndex.Spec(), actual.getAppendableIndexSpec());
Assert.assertNotNull(actual.getPartitionsSpec());
Assert.assertEquals(ImmutableMap.<Long, List<HadoopyShardSpec>>of(), actual.getShardSpecs());
Assert.assertEquals(new IndexSpec(), actual.getIndexSpec());
Assert.assertEquals(new IndexSpec(), actual.getIndexSpecForIntermediatePersists());
Assert.assertEquals(IndexSpec.DEFAULT, actual.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, actual.getIndexSpecForIntermediatePersists());
Assert.assertEquals(100, actual.getMaxRowsInMemory());
Assert.assertEquals(true, actual.isLeaveIntermediate());
Assert.assertEquals(true, actual.isCleanupOnFailure());

View File

@ -42,7 +42,7 @@ public class RealtimeAppenderatorTuningConfig implements AppenderatorConfig
private static final Period DEFAULT_INTERMEDIATE_PERSIST_PERIOD = new Period("PT10M");
private static final int DEFAULT_MAX_PENDING_PERSISTS = 0;
private static final ShardSpec DEFAULT_SHARD_SPEC = new NumberedShardSpec(0, 1);
private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec();
private static final IndexSpec DEFAULT_INDEX_SPEC = IndexSpec.DEFAULT;
private static final Boolean DEFAULT_REPORT_PARSE_EXCEPTIONS = Boolean.FALSE;
private static final long DEFAULT_HANDOFF_CONDITION_TIMEOUT = 0;
private static final long DEFAULT_ALERT_TIMEOUT = 0;

View File

@ -1236,7 +1236,7 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler
public static class IndexTuningConfig implements AppenderatorConfig
{
private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec();
private static final IndexSpec DEFAULT_INDEX_SPEC = IndexSpec.DEFAULT;
private static final int DEFAULT_MAX_PENDING_PERSISTS = 0;
private static final boolean DEFAULT_GUARANTEE_ROLLUP = false;
private static final boolean DEFAULT_REPORT_PARSE_EXCEPTIONS = false;

View File

@ -77,7 +77,7 @@ public class TestIndexTask extends IndexTask
null,
null,
new DynamicPartitionsSpec(10000, null),
new IndexSpec(),
IndexSpec.DEFAULT,
null,
3,
false,

View File

@ -169,7 +169,7 @@ public class AppenderatorsTest
maxRowsInMemory,
maxSizeInBytes == 0L ? getDefaultMaxBytesInMemory() : maxSizeInBytes,
skipBytesInMemoryOverheadCheck,
new IndexSpec(),
IndexSpec.DEFAULT,
0,
false,
0L,

View File

@ -174,7 +174,7 @@ public class BatchAppenderatorsTest
maxRowsInMemory,
maxSizeInBytes == 0L ? getDefaultMaxBytesInMemory() : maxSizeInBytes,
skipBytesInMemoryOverheadCheck,
new IndexSpec(),
IndexSpec.DEFAULT,
0,
false,
0L,

View File

@ -54,7 +54,6 @@ import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.rpc.indexing.OverlordClient;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.BitmapSerde.DefaultBitmapSerdeFactory;
import org.apache.druid.segment.data.CompressionFactory.LongEncodingStrategy;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;
@ -101,18 +100,16 @@ public class ClientCompactionTaskQuerySerdeTest
null,
new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
new DynamicPartitionsSpec(100, 30000L),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.UNCOMPRESSED,
LongEncodingStrategy.AUTO
),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.UNCOMPRESSED)
.withLongEncoding(LongEncodingStrategy.AUTO)
.build(),
2,
1000L,
TmpFileSegmentWriteOutMediumFactory.instance(),
@ -260,18 +257,16 @@ public class ClientCompactionTaskQuerySerdeTest
null,
new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
new DynamicPartitionsSpec(100, 30000L),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.UNCOMPRESSED,
LongEncodingStrategy.AUTO
),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.UNCOMPRESSED)
.withLongEncoding(LongEncodingStrategy.AUTO)
.build(),
2,
null,
null,
@ -322,18 +317,16 @@ public class ClientCompactionTaskQuerySerdeTest
30000L,
new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), 10),
new DynamicPartitionsSpec(100, 30000L),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.UNCOMPRESSED,
LongEncodingStrategy.AUTO
),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.UNCOMPRESSED)
.withLongEncoding(LongEncodingStrategy.AUTO)
.build(),
2,
1000L,
TmpFileSegmentWriteOutMediumFactory.instance(),

View File

@ -210,7 +210,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
ImmutableList.of(expectedLongSumMetric),
null,
mapper.readValue(mapper.writeValueAsString(new IndexSpec()), Map.class),
IndexSpec.DEFAULT.asMap(mapper),
mapper.readValue(
mapper.writeValueAsString(
new UniformGranularitySpec(
@ -773,7 +773,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
ImmutableList.of(expectedLongSumMetric),
getObjectMapper().readValue(getObjectMapper().writeValueAsString(compactionTask.getTransformSpec()), Map.class),
mapper.readValue(mapper.writeValueAsString(new IndexSpec()), Map.class),
IndexSpec.DEFAULT.asMap(mapper),
mapper.readValue(
mapper.writeValueAsString(
new UniformGranularitySpec(
@ -837,7 +837,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("ts", "dim"))),
ImmutableList.of(expectedCountMetric, expectedLongSumMetric),
getObjectMapper().readValue(getObjectMapper().writeValueAsString(compactionTask.getTransformSpec()), Map.class),
mapper.readValue(mapper.writeValueAsString(new IndexSpec()), Map.class),
IndexSpec.DEFAULT.asMap(mapper),
mapper.readValue(
mapper.writeValueAsString(
new UniformGranularitySpec(

View File

@ -325,12 +325,12 @@ public class CompactionTaskTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -625,12 +625,12 @@ public class CompactionTaskTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -707,12 +707,12 @@ public class CompactionTaskTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -738,12 +738,12 @@ public class CompactionTaskTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -783,12 +783,12 @@ public class CompactionTaskTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -822,12 +822,12 @@ public class CompactionTaskTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -972,12 +972,12 @@ public class CompactionTaskTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -1048,12 +1048,12 @@ public class CompactionTaskTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
false,
@ -1124,12 +1124,12 @@ public class CompactionTaskTest
null,
null,
new HashedPartitionsSpec(null, 3, null),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -1784,12 +1784,12 @@ public class CompactionTaskTest
null,
null,
new HashedPartitionsSpec(5000000, null, null), // automatically computed targetPartitionSize
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,

View File

@ -78,13 +78,13 @@ public class CompactionTuningConfigTest
null,
null,
new DynamicPartitionsSpec(100, 100L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
false,
true,
@ -120,13 +120,13 @@ public class CompactionTuningConfigTest
null,
null,
new DynamicPartitionsSpec(100, 100L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
false,
true,
@ -163,13 +163,13 @@ public class CompactionTuningConfigTest
null,
null,
new DynamicPartitionsSpec(100, 100L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
false,
true,
@ -196,6 +196,11 @@ public class CompactionTuningConfigTest
public void testEqualsAndHashCode()
{
EqualsVerifier.forClass(CompactionTask.CompactionTuningConfig.class)
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder().withDimensionCompression(CompressionStrategy.ZSTD).build()
)
.usingGetClass()
.verify();
}

View File

@ -68,12 +68,12 @@ public class IndexTaskSerdeTest
null,
null,
new DynamicPartitionsSpec(1000, 2000L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
false,
@ -105,12 +105,12 @@ public class IndexTaskSerdeTest
null,
null,
new HashedPartitionsSpec(null, 10, ImmutableList.of("dim1", "dim2")),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -142,12 +142,12 @@ public class IndexTaskSerdeTest
null,
null,
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
false,
@ -179,12 +179,12 @@ public class IndexTaskSerdeTest
10,
ImmutableList.of("dim1", "dim2"),
null,
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
false,
@ -218,12 +218,12 @@ public class IndexTaskSerdeTest
null,
null,
new DynamicPartitionsSpec(1000, 2000L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
true,
@ -256,12 +256,12 @@ public class IndexTaskSerdeTest
null,
null,
new HashedPartitionsSpec(null, 10, ImmutableList.of("dim1", "dim2")),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
null,
null,
false,

View File

@ -73,6 +73,7 @@ import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.handoff.SegmentHandoffNotifier;
import org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory;
import org.apache.druid.segment.incremental.RowIngestionMeters;
@ -171,7 +172,7 @@ public class IndexTaskTest extends IngestionTestBase
);
}
private static final IndexSpec INDEX_SPEC = new IndexSpec();
private static final IndexSpec INDEX_SPEC = IndexSpec.DEFAULT;
private final ObjectMapper jsonMapper;
private final IndexIO indexIO;
private final RowIngestionMetersFactory rowIngestionMetersFactory;
@ -2983,7 +2984,12 @@ public class IndexTaskTest extends IngestionTestBase
public void testEqualsAndHashCode()
{
EqualsVerifier.forClass(IndexTuningConfig.class)
.usingGetClass()
.verify();
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder().withDimensionCompression(CompressionStrategy.ZSTD).build()
)
.usingGetClass()
.verify();
}
}

View File

@ -65,7 +65,7 @@ import java.io.File;
public class TaskSerdeTest
{
private final ObjectMapper jsonMapper;
private final IndexSpec indexSpec = new IndexSpec();
private final IndexSpec indexSpec = IndexSpec.DEFAULT;
@Rule
public ExpectedException thrown = ExpectedException.none();
@ -106,7 +106,7 @@ public class TaskSerdeTest
);
Assert.assertFalse(tuningConfig.isReportParseExceptions());
Assert.assertEquals(new IndexSpec(), tuningConfig.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, tuningConfig.getIndexSpec());
Assert.assertEquals(new Period(Integer.MAX_VALUE), tuningConfig.getIntermediatePersistPeriod());
Assert.assertEquals(0, tuningConfig.getMaxPendingPersists());
Assert.assertEquals(1000000, tuningConfig.getMaxRowsInMemory());

View File

@ -246,13 +246,13 @@ public class ParallelIndexSupervisorTaskTest
null,
null,
new HashedPartitionsSpec(null, 10, null),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
forceGuaranteedRollup,
true,
@ -318,13 +318,13 @@ public class ParallelIndexSupervisorTaskTest
null,
null,
new HashedPartitionsSpec(null, 10, null),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
true,
true,

View File

@ -78,13 +78,13 @@ public class ParallelIndexTuningConfigTest
null,
null,
new DynamicPartitionsSpec(100, 100L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
false,
true,
@ -125,13 +125,13 @@ public class ParallelIndexTuningConfigTest
null,
null,
new DynamicPartitionsSpec(100, 100L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
false,
true,
@ -172,13 +172,13 @@ public class ParallelIndexTuningConfigTest
null,
null,
new DynamicPartitionsSpec(100, 100L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
false,
true,
@ -221,13 +221,13 @@ public class ParallelIndexTuningConfigTest
null,
null,
new DynamicPartitionsSpec(100, 100L),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
false,
true,
@ -267,13 +267,13 @@ public class ParallelIndexTuningConfigTest
null,
null,
new HashedPartitionsSpec(null, 10, null),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
forceGuaranteedRollup,
true,
@ -313,13 +313,13 @@ public class ParallelIndexTuningConfigTest
null,
null,
new SingleDimensionPartitionsSpec(100, null, "dim", false),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
forceGuaranteedRollup,
true,
@ -359,13 +359,13 @@ public class ParallelIndexTuningConfigTest
null,
null,
new DynamicPartitionsSpec(100, null),
new IndexSpec(
RoaringBitmapSerdeFactory.getInstance(),
CompressionStrategy.UNCOMPRESSED,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(),
IndexSpec.builder()
.withBitmapSerdeFactory(RoaringBitmapSerdeFactory.getInstance())
.withDimensionCompression(CompressionStrategy.UNCOMPRESSED)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.DEFAULT,
1,
forceGuaranteedRollup,
true,
@ -392,7 +392,12 @@ public class ParallelIndexTuningConfigTest
public void testEqualsAndHashCode()
{
EqualsVerifier.forClass(ParallelIndexTuningConfig.class)
.usingGetClass()
.verify();
.usingGetClass()
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder().withDimensionCompression(CompressionStrategy.ZSTD).build()
)
.verify();
}
}

View File

@ -793,7 +793,7 @@ public class DruidSegmentReaderTest extends NullHandlingTest
).persist(
incrementalIndex,
segmentDirectory,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
segmentSize = FileUtils.getFileSize(segmentDirectory);

View File

@ -415,7 +415,7 @@ public class TaskLifecycleTest extends InitializedNullHandlingTest
// initialize variables
announcedSinks = 0;
pushedSegments = 0;
indexSpec = new IndexSpec();
indexSpec = IndexSpec.DEFAULT;
emitter = newMockEmitter();
EmittingLogger.registerEmitter(emitter);
mapper = TEST_UTILS.getTestObjectMapper();

View File

@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import org.apache.druid.segment.column.StringEncodingStrategy;
import org.apache.druid.segment.data.BitmapSerde;
import org.apache.druid.segment.data.BitmapSerdeFactory;
@ -44,6 +43,13 @@ import java.util.Objects;
*/
public class IndexSpec
{
public static IndexSpec DEFAULT = IndexSpec.builder().build();
public static Builder builder()
{
return new Builder();
}
private final BitmapSerdeFactory bitmapSerdeFactory;
private final CompressionStrategy dimensionCompression;
private final StringEncodingStrategy stringDictionaryEncoding;
@ -56,37 +62,6 @@ public class IndexSpec
@Nullable
private final SegmentizerFactory segmentLoader;
/**
* Creates an IndexSpec with default parameters
*/
public IndexSpec()
{
this(null, null, null, null, null, null, null);
}
@VisibleForTesting
public IndexSpec(
@Nullable BitmapSerdeFactory bitmapSerdeFactory,
@Nullable CompressionStrategy dimensionCompression,
@Nullable CompressionStrategy metricCompression,
@Nullable CompressionFactory.LongEncodingStrategy longEncoding
)
{
this(bitmapSerdeFactory, dimensionCompression, null, metricCompression, longEncoding, null, null);
}
@VisibleForTesting
public IndexSpec(
@Nullable BitmapSerdeFactory bitmapSerdeFactory,
@Nullable CompressionStrategy dimensionCompression,
@Nullable CompressionStrategy metricCompression,
@Nullable CompressionFactory.LongEncodingStrategy longEncoding,
@Nullable SegmentizerFactory segmentLoader
)
{
this(bitmapSerdeFactory, dimensionCompression, null, metricCompression, longEncoding, null, segmentLoader);
}
/**
* Creates an IndexSpec with the given storage format settings.
*
@ -232,4 +207,76 @@ public class IndexSpec
", segmentLoader=" + segmentLoader +
'}';
}
public static class Builder
{
@Nullable
private BitmapSerdeFactory bitmapSerdeFactory;
@Nullable
private CompressionStrategy dimensionCompression;
@Nullable
private StringEncodingStrategy stringDictionaryEncoding;
@Nullable
private CompressionStrategy metricCompression;
@Nullable
private CompressionFactory.LongEncodingStrategy longEncoding;
@Nullable
private CompressionStrategy jsonCompression;
@Nullable
private SegmentizerFactory segmentLoader;
public Builder withBitmapSerdeFactory(BitmapSerdeFactory bitmapSerdeFactory)
{
this.bitmapSerdeFactory = bitmapSerdeFactory;
return this;
}
public Builder withDimensionCompression(CompressionStrategy dimensionCompression)
{
this.dimensionCompression = dimensionCompression;
return this;
}
public Builder withStringDictionaryEncoding(StringEncodingStrategy stringDictionaryEncoding)
{
this.stringDictionaryEncoding = stringDictionaryEncoding;
return this;
}
public Builder withMetricCompression(CompressionStrategy metricCompression)
{
this.metricCompression = metricCompression;
return this;
}
public Builder withLongEncoding(CompressionFactory.LongEncodingStrategy longEncoding)
{
this.longEncoding = longEncoding;
return this;
}
public Builder withJsonCompression(CompressionStrategy jsonCompression)
{
this.jsonCompression = jsonCompression;
return this;
}
public Builder withSegmentLoader(SegmentizerFactory segmentLoader)
{
this.segmentLoader = segmentLoader;
return this;
}
public IndexSpec build()
{
return new IndexSpec(
bitmapSerdeFactory,
dimensionCompression,
stringDictionaryEncoding,
metricCompression,
longEncoding,
jsonCompression,
segmentLoader
);
}
}
}

View File

@ -521,14 +521,14 @@ public class VSizeLongSerde
}
for ( ; i + Byte.SIZE < length; index += Byte.SIZE) {
final byte unpack = buffer.get(offset + (index >> 3));
out[outPosition + i++] = base + (unpack >> 7) & 1;
out[outPosition + i++] = base + (unpack >> 6) & 1;
out[outPosition + i++] = base + (unpack >> 5) & 1;
out[outPosition + i++] = base + (unpack >> 4) & 1;
out[outPosition + i++] = base + (unpack >> 3) & 1;
out[outPosition + i++] = base + (unpack >> 2) & 1;
out[outPosition + i++] = base + (unpack >> 1) & 1;
out[outPosition + i++] = base + unpack & 1;
out[outPosition + i++] = base + ((unpack >> 7) & 1);
out[outPosition + i++] = base + ((unpack >> 6) & 1);
out[outPosition + i++] = base + ((unpack >> 5) & 1);
out[outPosition + i++] = base + ((unpack >> 4) & 1);
out[outPosition + i++] = base + ((unpack >> 3) & 1);
out[outPosition + i++] = base + ((unpack >> 2) & 1);
out[outPosition + i++] = base + ((unpack >> 1) & 1);
out[outPosition + i++] = base + (unpack & 1);
}
while (i < length) {
out[outPosition + i++] = base + get(index++);
@ -592,14 +592,14 @@ public class VSizeLongSerde
}
for ( ; i + 8 < length; index += 8) {
final short unpack = buffer.getShort(offset + (index >> 2));
out[outPosition + i++] = base + (unpack >> 14) & 3;
out[outPosition + i++] = base + (unpack >> 12) & 3;
out[outPosition + i++] = base + (unpack >> 10) & 3;
out[outPosition + i++] = base + (unpack >> 8) & 3;
out[outPosition + i++] = base + (unpack >> 6) & 3;
out[outPosition + i++] = base + (unpack >> 4) & 3;
out[outPosition + i++] = base + (unpack >> 2) & 3;
out[outPosition + i++] = base + unpack & 3;
out[outPosition + i++] = base + ((unpack >> 14) & 3);
out[outPosition + i++] = base + ((unpack >> 12) & 3);
out[outPosition + i++] = base + ((unpack >> 10) & 3);
out[outPosition + i++] = base + ((unpack >> 8) & 3);
out[outPosition + i++] = base + ((unpack >> 6) & 3);
out[outPosition + i++] = base + ((unpack >> 4) & 3);
out[outPosition + i++] = base + ((unpack >> 2) & 3);
out[outPosition + i++] = base + (unpack & 3);
}
while (i < length) {
out[outPosition + i++] = base + get(index++);
@ -659,18 +659,18 @@ public class VSizeLongSerde
// byte align
while ((index & 0x1) != 0 && i < length) {
out[outPosition + i++] = base + get(index++) & 0xF;
out[outPosition + i++] = base + (get(index++) & 0xF);
}
for ( ; i + 8 < length; index += 8) {
final int unpack = buffer.getInt(offset + (index >> 1));
out[outPosition + i++] = base + (unpack >> 28) & 0xF;
out[outPosition + i++] = base + (unpack >> 24) & 0xF;
out[outPosition + i++] = base + (unpack >> 20) & 0xF;
out[outPosition + i++] = base + (unpack >> 16) & 0xF;
out[outPosition + i++] = base + (unpack >> 12) & 0xF;
out[outPosition + i++] = base + (unpack >> 8) & 0xF;
out[outPosition + i++] = base + (unpack >> 4) & 0xF;
out[outPosition + i++] = base + unpack & 0xF;
out[outPosition + i++] = base + ((unpack >> 28) & 0xF);
out[outPosition + i++] = base + ((unpack >> 24) & 0xF);
out[outPosition + i++] = base + ((unpack >> 20) & 0xF);
out[outPosition + i++] = base + ((unpack >> 16) & 0xF);
out[outPosition + i++] = base + ((unpack >> 12) & 0xF);
out[outPosition + i++] = base + ((unpack >> 8) & 0xF);
out[outPosition + i++] = base + ((unpack >> 4) & 0xF);
out[outPosition + i++] = base + (unpack & 0xF);
}
while (i < length) {
out[outPosition + i++] = base + get(index++);
@ -725,7 +725,7 @@ public class VSizeLongSerde
public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
{
for (int i = 0, indexOffset = startIndex; i < length; i++, indexOffset++) {
out[outPosition + i] = base + buffer.get(offset + indexOffset) & 0xFF;
out[outPosition + i] = base + (buffer.get(offset + indexOffset) & 0xFF);
}
}
@ -795,7 +795,7 @@ public class VSizeLongSerde
int index = startIndex;
// every other value is byte aligned
if ((index & 0x1) != 0) {
out[outPosition + i++] = get(index++);
out[outPosition + i++] = base + get(index++);
}
final int unpackSize = Long.BYTES + Integer.BYTES;
for (int indexOffset = (index * 3) >> 1; i + 8 < length; indexOffset += unpackSize) {
@ -838,7 +838,7 @@ public class VSizeLongSerde
public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
{
for (int i = 0, indexOffset = (startIndex << 1); i < length; i++, indexOffset += Short.BYTES) {
out[outPosition + i] = base + buffer.getShort(offset + indexOffset) & 0xFFFF;
out[outPosition + i] = base + (buffer.getShort(offset + indexOffset) & 0xFFFF);
}
}
@ -851,7 +851,7 @@ public class VSizeLongSerde
return i;
}
out[outPosition + i] = base + buffer.getShort(offset + (index << 1)) & 0xFFFF;
out[outPosition + i] = base + (buffer.getShort(offset + (index << 1)) & 0xFFFF);
}
return length;
@ -884,7 +884,7 @@ public class VSizeLongSerde
int index = startIndex;
// every other value is byte aligned
if ((index & 0x1) != 0) {
out[outPosition + i++] = get(index++);
out[outPosition + i++] = base + get(index++);
}
final int unpackSize = Long.BYTES + Long.BYTES + Integer.BYTES;
for (int indexOffset = (index * 5) >> 1; i + 8 < length; indexOffset += unpackSize) {
@ -970,7 +970,7 @@ public class VSizeLongSerde
public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
{
for (int i = 0, indexOffset = (startIndex << 2); i < length; i++, indexOffset += Integer.BYTES) {
out[outPosition + i] = base + buffer.getInt(offset + indexOffset) & 0xFFFFFFFFL;
out[outPosition + i] = base + (buffer.getInt(offset + indexOffset) & 0xFFFFFFFFL);
}
}
}

View File

@ -358,7 +358,7 @@ public class DoubleStorageTest
File someTmpFile = File.createTempFile("billy", "yay");
someTmpFile.delete();
FileUtils.mkdirp(someTmpFile);
INDEX_MERGER_V9.persist(index, someTmpFile, new IndexSpec(), null);
INDEX_MERGER_V9.persist(index, someTmpFile, IndexSpec.DEFAULT, null);
someTmpFile.delete();
return INDEX_IO.loadIndex(someTmpFile);
}

View File

@ -174,7 +174,7 @@ public class MultiValuedDimensionTest extends InitializedNullHandlingTest
persistedSegmentDir = FileUtils.createTempDir();
TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory)
.persist(incrementalIndex, persistedSegmentDir, new IndexSpec(), null);
.persist(incrementalIndex, persistedSegmentDir, IndexSpec.DEFAULT, null);
queryableIndex = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDir);
@ -207,7 +207,7 @@ public class MultiValuedDimensionTest extends InitializedNullHandlingTest
}
persistedSegmentDirNullSampler = FileUtils.createTempDir();
TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory)
.persist(incrementalIndexNullSampler, persistedSegmentDirNullSampler, new IndexSpec(), null);
.persist(incrementalIndexNullSampler, persistedSegmentDirNullSampler, IndexSpec.DEFAULT, null);
queryableIndexNullSampler = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDirNullSampler);
}

View File

@ -210,7 +210,7 @@ public class NestedDataTestUtils
COUNT,
granularity,
rollup,
new IndexSpec()
IndexSpec.DEFAULT
);
}
@ -236,7 +236,7 @@ public class NestedDataTestUtils
SIMPLE_DATA_FILE,
Granularities.NONE,
true,
new IndexSpec()
IndexSpec.DEFAULT
);
}
@ -322,7 +322,7 @@ public class NestedDataTestUtils
COUNT,
granularity,
rollup,
new IndexSpec()
IndexSpec.DEFAULT
);
}
@ -505,7 +505,7 @@ public class NestedDataTestUtils
tempFolder,
closer,
jsonInputFile,
new IndexSpec()
IndexSpec.DEFAULT
)
)
.add(NestedDataTestUtils.createIncrementalIndexForJsonInput(tempFolder, jsonInputFile))
@ -556,7 +556,7 @@ public class NestedDataTestUtils
tempFolder,
closer,
jsonInputFile,
new IndexSpec()
IndexSpec.DEFAULT
)
)
.addAll(
@ -564,7 +564,7 @@ public class NestedDataTestUtils
tempFolder,
closer,
jsonInputFile,
new IndexSpec()
IndexSpec.DEFAULT
)
)
.build();
@ -592,15 +592,11 @@ public class NestedDataTestUtils
tempFolder,
closer,
jsonInputFile,
new IndexSpec(
null,
null,
new StringEncodingStrategy.FrontCoded(4, (byte) 0x01),
null,
null,
null,
null
)
IndexSpec.builder()
.withStringDictionaryEncoding(
new StringEncodingStrategy.FrontCoded(4, (byte) 0x01)
)
.build()
)
)
.addAll(
@ -608,15 +604,11 @@ public class NestedDataTestUtils
tempFolder,
closer,
jsonInputFile,
new IndexSpec(
null,
null,
new StringEncodingStrategy.FrontCoded(4, (byte) 0x00),
null,
null,
null,
null
)
IndexSpec.builder()
.withStringDictionaryEncoding(
new StringEncodingStrategy.FrontCoded(4, (byte) 0x00)
)
.build()
)
)
.build();

View File

@ -543,7 +543,7 @@ public class AggregationTestHelper implements Closeable
if (!index.canAppendRow()) {
File tmp = tempFolder.newFolder();
toMerge.add(tmp);
indexMerger.persist(index, tmp, new IndexSpec(), null);
indexMerger.persist(index, tmp, IndexSpec.DEFAULT, null);
index.close();
index = new OnheapIncrementalIndex.Builder()
.setIndexSchema(
@ -572,19 +572,19 @@ public class AggregationTestHelper implements Closeable
if (toMerge.size() > 0) {
File tmp = tempFolder.newFolder();
toMerge.add(tmp);
indexMerger.persist(index, tmp, new IndexSpec(), null);
indexMerger.persist(index, tmp, IndexSpec.DEFAULT, null);
List<QueryableIndex> indexes = new ArrayList<>(toMerge.size());
for (File file : toMerge) {
indexes.add(indexIO.loadIndex(file));
}
indexMerger.mergeQueryableIndex(indexes, rollup, metrics, outDir, new IndexSpec(), null, -1);
indexMerger.mergeQueryableIndex(indexes, rollup, metrics, outDir, IndexSpec.DEFAULT, null, -1);
for (QueryableIndex qi : indexes) {
qi.close();
}
} else {
indexMerger.persist(index, outDir, new IndexSpec(), null);
indexMerger.persist(index, outDir, IndexSpec.DEFAULT, null);
}
}
finally {
@ -679,7 +679,7 @@ public class AggregationTestHelper implements Closeable
if (outDir == null) {
outDir = tempFolder.newFolder();
}
indexMerger.persist(index, outDir, new IndexSpec(), null);
indexMerger.persist(index, outDir, IndexSpec.DEFAULT, null);
return new QueryableIndexSegment(indexIO.loadIndex(outDir), SegmentId.dummy(""));
}

View File

@ -209,7 +209,7 @@ public class GroupByLimitPushDownInsufficientBufferTest extends InitializedNullH
final File fileA = INDEX_MERGER_V9.persist(
indexA,
new File(tmpDir, "A"),
new IndexSpec(),
IndexSpec.DEFAULT,
OffHeapMemorySegmentWriteOutMediumFactory.instance()
);
QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
@ -251,7 +251,7 @@ public class GroupByLimitPushDownInsufficientBufferTest extends InitializedNullH
final File fileB = INDEX_MERGER_V9.persist(
indexB,
new File(tmpDir, "B"),
new IndexSpec(),
IndexSpec.DEFAULT,
OffHeapMemorySegmentWriteOutMediumFactory.instance()
);
QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);

View File

@ -206,7 +206,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest
final File fileA = INDEX_MERGER_V9.persist(
indexA,
new File(tmpDir, "A"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
@ -242,7 +242,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest
final File fileB = INDEX_MERGER_V9.persist(
indexB,
new File(tmpDir, "B"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);
@ -277,7 +277,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest
final File fileC = INDEX_MERGER_V9.persist(
indexC,
new File(tmpDir, "C"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexC = INDEX_IO.loadIndex(fileC);
@ -313,7 +313,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest
final File fileD = INDEX_MERGER_V9.persist(
indexD,
new File(tmpDir, "D"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexD = INDEX_IO.loadIndex(fileD);
@ -428,7 +428,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest
final File fileE = INDEX_MERGER_V9.persist(
indexE,
new File(tmpDir, "E"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexE = INDEX_IO.loadIndex(fileE);
@ -523,7 +523,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest
final File fileF = INDEX_MERGER_V9.persist(
indexF,
new File(tmpDir, "F"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexF = INDEX_IO.loadIndex(fileF);

View File

@ -167,7 +167,7 @@ public class GroupByMultiSegmentTest
final File fileA = INDEX_MERGER_V9.persist(
indexA,
new File(tmpDir, "A"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
@ -189,7 +189,7 @@ public class GroupByMultiSegmentTest
final File fileB = INDEX_MERGER_V9.persist(
indexB,
new File(tmpDir, "B"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);

View File

@ -191,7 +191,7 @@ public class NestedQueryPushDownTest extends InitializedNullHandlingTest
final File fileA = INDEX_MERGER_V9.persist(
indexA,
new File(tmpDir, "A"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
@ -235,7 +235,7 @@ public class NestedQueryPushDownTest extends InitializedNullHandlingTest
final File fileB = INDEX_MERGER_V9.persist(
indexB,
new File(tmpDir, "B"),
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);

View File

@ -137,7 +137,7 @@ public class NestedDataScanQueryTest extends InitializedNullHandlingTest
NestedDataTestUtils.COUNT,
Granularities.YEAR,
true,
new IndexSpec()
IndexSpec.DEFAULT
)
).build();
@ -340,7 +340,7 @@ public class NestedDataScanQueryTest extends InitializedNullHandlingTest
NestedDataTestUtils.SIMPLE_DATA_FILE,
Granularities.HOUR,
true,
new IndexSpec()
IndexSpec.DEFAULT
);
final Sequence<ScanResultValue> seq = helper.runQueryOnSegmentsObjs(segs, scanQuery);
@ -524,7 +524,7 @@ public class NestedDataScanQueryTest extends InitializedNullHandlingTest
NestedDataTestUtils.COUNT,
Granularities.DAY,
true,
new IndexSpec()
IndexSpec.DEFAULT
);
@ -584,7 +584,7 @@ public class NestedDataScanQueryTest extends InitializedNullHandlingTest
aggs,
Granularities.NONE,
true,
new IndexSpec()
IndexSpec.DEFAULT
);

View File

@ -82,7 +82,7 @@ public class CustomSegmentizerFactoryTest extends InitializedNullHandlingTest
data,
Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"),
segment,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
@ -101,13 +101,7 @@ public class CustomSegmentizerFactoryTest extends InitializedNullHandlingTest
data,
Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"),
segment,
new IndexSpec(
null,
null,
null,
null,
new CustomSegmentizerFactory()
),
IndexSpec.builder().withSegmentLoader(new CustomSegmentizerFactory()).build(),
null
);

View File

@ -90,7 +90,7 @@ public class EmptyIndexTest
new AggregatorFactory[0],
tmpDir,
DimensionsSpec.EMPTY,
new IndexSpec(),
IndexSpec.DEFAULT,
-1
);

View File

@ -73,7 +73,7 @@ public class IndexBuilder
private SegmentWriteOutMediumFactory segmentWriteOutMediumFactory = OffHeapMemorySegmentWriteOutMediumFactory.instance();
private IndexMerger indexMerger;
private File tmpDir;
private IndexSpec indexSpec = new IndexSpec();
private IndexSpec indexSpec = IndexSpec.DEFAULT;
private int maxRows = DEFAULT_MAX_ROWS;
private int intermediatePersistSize = ROWS_PER_INDEX_FOR_MERGING;
private IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()

View File

@ -100,7 +100,7 @@ public class IndexIONullColumnsCompatibilityTest extends InitializedNullHandling
segmentDir = indexMerger.persist(
incrementalIndex,
temporaryFolder.newFolder(),
new IndexSpec(),
IndexSpec.DEFAULT,
OffHeapMemorySegmentWriteOutMediumFactory.instance()
);
}

View File

@ -66,12 +66,12 @@ import java.util.Objects;
public class IndexIOTest extends InitializedNullHandlingTest
{
private static Interval DEFAULT_INTERVAL = Intervals.of("1970-01-01/2000-01-01");
private static final IndexSpec INDEX_SPEC = IndexMergerTestBase.makeIndexSpec(
new ConciseBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZ4,
CompressionFactory.LongEncodingStrategy.LONGS
);
private static final IndexSpec INDEX_SPEC = IndexSpec.builder()
.withBitmapSerdeFactory(new ConciseBitmapSerdeFactory())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZ4)
.withLongEncoding(CompressionFactory.LongEncodingStrategy.LONGS)
.build();
static {
NullHandling.initializeForTests();

View File

@ -74,7 +74,7 @@ public class IndexMergerNullHandlingTest
{
indexMerger = TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance());
indexIO = TestHelper.getTestIndexIO();
indexSpec = new IndexSpec();
indexSpec = IndexSpec.DEFAULT;
}
@Test

View File

@ -58,7 +58,7 @@ public class IndexMergerRollupTest extends InitializedNullHandlingTest
indexMerger = TestHelper
.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance());
indexIO = TestHelper.getTestIndexIO();
indexSpec = new IndexSpec();
indexSpec = IndexSpec.DEFAULT;
}
private void testStringFirstLastRollup(

View File

@ -114,25 +114,6 @@ public class IndexMergerTestBase extends InitializedNullHandlingTest
);
}
static IndexSpec makeIndexSpec(
BitmapSerdeFactory bitmapSerdeFactory,
CompressionStrategy compressionStrategy,
CompressionStrategy dimCompressionStrategy,
CompressionFactory.LongEncodingStrategy longEncodingStrategy
)
{
if (bitmapSerdeFactory != null || compressionStrategy != null) {
return new IndexSpec(
bitmapSerdeFactory,
dimCompressionStrategy,
compressionStrategy,
longEncodingStrategy
);
} else {
return new IndexSpec();
}
}
static BitmapValues getBitmapIndex(QueryableIndexIndexableAdapter adapter, String dimension, String value)
{
final ColumnHolder columnHolder = adapter.getQueryableIndex().getColumnHolder(dimension);
@ -170,12 +151,12 @@ public class IndexMergerTestBase extends InitializedNullHandlingTest
CompressionFactory.LongEncodingStrategy longEncodingStrategy
)
{
this.indexSpec = makeIndexSpec(
bitmapSerdeFactory != null ? bitmapSerdeFactory : new ConciseBitmapSerdeFactory(),
compressionStrategy,
dimCompressionStrategy,
longEncodingStrategy
);
this.indexSpec = IndexSpec.builder()
.withBitmapSerdeFactory(bitmapSerdeFactory != null ? bitmapSerdeFactory : new ConciseBitmapSerdeFactory())
.withDimensionCompression(dimCompressionStrategy)
.withMetricCompression(compressionStrategy)
.withLongEncoding(longEncodingStrategy)
.build();
this.indexIO = TestHelper.getTestIndexIO();
this.useBitmapIndexes = bitmapSerdeFactory != null;
}
@ -516,18 +497,20 @@ public class IndexMergerTestBase extends InitializedNullHandlingTest
Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index1.getAvailableDimensions()));
Assert.assertEquals(3, index1.getColumnNames().size());
IndexSpec newSpec = new IndexSpec(
indexSpec.getBitmapSerdeFactory(),
CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression()) ?
CompressionStrategy.LZF :
CompressionStrategy.LZ4,
CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression()) ?
CompressionStrategy.LZF :
CompressionStrategy.LZ4,
CompressionFactory.LongEncodingStrategy.LONGS.equals(indexSpec.getLongEncoding()) ?
CompressionFactory.LongEncodingStrategy.AUTO :
CompressionFactory.LongEncodingStrategy.LONGS
);
IndexSpec.Builder builder = IndexSpec.builder().withBitmapSerdeFactory(indexSpec.getBitmapSerdeFactory());
if (CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression())) {
builder.withDimensionCompression(CompressionStrategy.LZF)
.withMetricCompression(CompressionStrategy.LZF);
} else {
builder.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZ4);
}
if (CompressionFactory.LongEncodingStrategy.LONGS.equals(indexSpec.getLongEncoding())) {
builder.withLongEncoding(CompressionFactory.LongEncodingStrategy.AUTO);
} else {
builder.withLongEncoding(CompressionFactory.LongEncodingStrategy.LONGS);
}
IndexSpec newSpec = builder.build();
AggregatorFactory[] mergedAggregators = new AggregatorFactory[]{new CountAggregatorFactory("count")};
QueryableIndex merged = closer.closeLater(

View File

@ -89,7 +89,7 @@ public class IndexMergerV9WithSpatialIndexTest extends InitializedNullHandlingTe
IndexMergerV9 indexMergerV9 = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory);
IndexIO indexIO = TestHelper.getTestIndexIO();
final IndexSpec indexSpec = new IndexSpec();
final IndexSpec indexSpec = IndexSpec.DEFAULT;
final IncrementalIndex rtIndex = makeIncrementalIndex();
final QueryableIndex mMappedTestIndex = makeQueryableIndex(indexSpec, indexMergerV9, indexIO);
final QueryableIndex mergedRealtimeIndex = makeMergedQueryableIndex(indexSpec, indexMergerV9, indexIO);

View File

@ -66,17 +66,17 @@ public class IndexSpecTest
@Test
public void testDefaults()
{
final IndexSpec spec = new IndexSpec();
final IndexSpec spec = IndexSpec.DEFAULT;
Assert.assertEquals(CompressionStrategy.LZ4, spec.getDimensionCompression());
Assert.assertEquals(CompressionStrategy.LZ4, spec.getMetricCompression());
Assert.assertEquals(CompressionFactory.LongEncodingStrategy.LONGS, spec.getLongEncoding());
Assert.assertEquals(LongEncodingStrategy.LONGS, spec.getLongEncoding());
}
@Test
public void testAsMap()
{
final ObjectMapper objectMapper = new DefaultObjectMapper();
final IndexSpec spec = new IndexSpec();
final IndexSpec spec = IndexSpec.DEFAULT;
final Map<String, Object> map = spec.asMap(objectMapper);
Assert.assertEquals(
spec.getBitmapSerdeFactory(),
@ -99,6 +99,15 @@ public class IndexSpecTest
@Test
public void testEquals()
{
EqualsVerifier.forClass(IndexSpec.class).usingGetClass().verify();
EqualsVerifier.forClass(IndexSpec.class)
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder()
.withJsonCompression(CompressionStrategy.ZSTD)
.build()
)
.usingGetClass()
.verify();
}
}

View File

@ -43,13 +43,12 @@ import java.util.Collection;
@RunWith(Parameterized.class)
public class QueryableIndexIndexableAdapterTest
{
private static final IndexSpec INDEX_SPEC = IndexMergerTestBase.makeIndexSpec(
new ConciseBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZ4,
CompressionFactory.LongEncodingStrategy.LONGS
);
private static final IndexSpec INDEX_SPEC = IndexSpec.builder()
.withBitmapSerdeFactory(new ConciseBitmapSerdeFactory())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZ4)
.withLongEncoding(CompressionFactory.LongEncodingStrategy.LONGS)
.build();
@Parameterized.Parameters
public static Collection<?> constructorFeeder()

View File

@ -76,7 +76,7 @@ public class SchemalessIndexTest
new CountAggregatorFactory("count")
};
private static final IndexSpec INDEX_SPEC = new IndexSpec();
private static final IndexSpec INDEX_SPEC = IndexSpec.DEFAULT;
private static final List<Map<String, Object>> EVENTS = new ArrayList<>();

View File

@ -146,7 +146,7 @@ public class TestIndex
new DoubleMaxAggregatorFactory(DOUBLE_METRICS[2], VIRTUAL_COLUMNS.getVirtualColumns()[0].getOutputName()),
new HyperUniquesAggregatorFactory("quality_uniques", "quality")
};
public static final IndexSpec INDEX_SPEC = new IndexSpec();
public static final IndexSpec INDEX_SPEC = IndexSpec.DEFAULT;
public static final IndexMerger INDEX_MERGER =
TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance());
@ -215,15 +215,11 @@ public class TestIndex
private static Supplier<QueryableIndex> frontCodedMmappedIndex = Suppliers.memoize(
() -> persistRealtimeAndLoadMMapped(
realtimeIndex.get(),
new IndexSpec(
null,
null,
new StringEncodingStrategy.FrontCoded(4, FrontCodedIndexed.V1),
null,
null,
null,
null
)
IndexSpec.builder()
.withStringDictionaryEncoding(
new StringEncodingStrategy.FrontCoded(4, FrontCodedIndexed.V1)
)
.build()
)
);

View File

@ -247,6 +247,22 @@ public class VSizeLongSerdeTest
values[i],
out[outPosition]
);
int delta = 100_000;
deserializer.getDelta(out, outPosition, i, 1, delta);
Assert.assertEquals(
StringUtils.format("Deserializer (testContiguousGetSingleRow, numBits = %d, position = %d)", numBits, i),
values[i] + delta,
out[outPosition]
);
deserializer.getDelta(out, outPosition, i, 1, -delta);
Assert.assertEquals(
StringUtils.format("Deserializer (testContiguousGetSingleRow, numBits = %d, position = %d)", numBits, i),
values[i] - delta,
out[outPosition]
);
}
}
@ -266,6 +282,27 @@ public class VSizeLongSerdeTest
values,
Arrays.stream(out).skip(outPosition).toArray()
);
final long[] valuesPlus = new long[values.length];
final long[] valuesMinus = new long[values.length];
final int delta = 100_000;
for (int i = 0; i < values.length; i++) {
valuesPlus[i] = values[i] + delta;
valuesMinus[i] = values[i] - delta;
}
deserializer.getDelta(out, outPosition, 0, values.length, delta);
Assert.assertArrayEquals(
StringUtils.format("Deserializer (testContiguousGetWholeRegion, numBits = %d)", numBits),
valuesPlus,
Arrays.stream(out).skip(outPosition).toArray()
);
deserializer.getDelta(out, outPosition, 0, values.length, -delta);
Assert.assertArrayEquals(
StringUtils.format("Deserializer (testContiguousGetWholeRegion, numBits = %d)", numBits),
valuesMinus,
Arrays.stream(out).skip(outPosition).toArray()
);
}
private static void testNoncontiguousGetSingleRow(
@ -291,6 +328,23 @@ public class VSizeLongSerdeTest
values[i],
out[outPosition]
);
int delta = 100_000;
deserializer.getDelta(out, outPosition, indexes, 1, indexOffset, values.length, delta);
Assert.assertEquals(
StringUtils.format("Deserializer (testNoncontiguousGetSingleRow, numBits = %d, position = %d)", numBits, i),
values[i] + delta,
out[outPosition]
);
deserializer.getDelta(out, outPosition, indexes, 1, indexOffset, values.length, -delta);
Assert.assertEquals(
StringUtils.format("Deserializer (testNoncontiguousGetSingleRow, numBits = %d, position = %d)", numBits, i),
values[i] - delta,
out[outPosition]
);
}
}
@ -304,17 +358,24 @@ public class VSizeLongSerdeTest
final int outPosition = 1;
final long[] out = new long[values.length + outPosition];
final long[] expectedOut = new long[values.length + outPosition];
final long[] expectedOutDeltaPlus = new long[values.length + outPosition];
final long[] expectedOutDeltaMinus = new long[values.length + outPosition];
final int[] indexes = new int[values.length + outPosition];
Arrays.fill(out, -1);
Arrays.fill(expectedOut, -1);
Arrays.fill(expectedOutDeltaPlus, -1);
Arrays.fill(expectedOutDeltaMinus, -1);
Arrays.fill(indexes, -1);
final int delta = 100_000;
int cnt = 0;
for (int i = 0; i < values.length; i++) {
if (i % 2 == 0) {
indexes[outPosition + i / 2] = i + indexOffset;
expectedOut[outPosition + i / 2] = values[i];
expectedOutDeltaPlus[outPosition + i / 2] = values[i] + delta;
expectedOutDeltaMinus[outPosition + i / 2] = values[i] - delta;
cnt++;
}
}
@ -326,6 +387,22 @@ public class VSizeLongSerdeTest
expectedOut,
out
);
deserializer.getDelta(out, outPosition, indexes, cnt, indexOffset, values.length, delta);
Assert.assertArrayEquals(
StringUtils.format("Deserializer (testNoncontiguousGetEveryOtherValue, numBits = %d)", numBits),
expectedOutDeltaPlus,
out
);
deserializer.getDelta(out, outPosition, indexes, cnt, indexOffset, values.length, -delta);
Assert.assertArrayEquals(
StringUtils.format("Deserializer (testNoncontiguousGetEveryOtherValue, numBits = %d)", numBits),
expectedOutDeltaMinus,
out
);
}
private static void testNoncontiguousGetEveryOtherValueWithLimit(
@ -338,11 +415,16 @@ public class VSizeLongSerdeTest
final int outPosition = 1;
final long[] out = new long[values.length + outPosition];
final long[] expectedOut = new long[values.length + outPosition];
final long[] expectedOutDeltaPlus = new long[values.length + outPosition];
final long[] expectedOutDeltaMinus = new long[values.length + outPosition];
final int[] indexes = new int[values.length + outPosition];
final int limit = values.length - 2; // Don't do the last value
final int delta = 100_000;
Arrays.fill(out, -1);
Arrays.fill(expectedOut, -1);
Arrays.fill(expectedOutDeltaPlus, -1);
Arrays.fill(expectedOutDeltaMinus, -1);
Arrays.fill(indexes, -1);
int cnt = 0;
@ -352,13 +434,15 @@ public class VSizeLongSerdeTest
if (i < limit) {
expectedOut[outPosition + i / 2] = values[i];
expectedOutDeltaPlus[outPosition + i / 2] = values[i] + delta;
expectedOutDeltaMinus[outPosition + i / 2] = values[i] - delta;
}
cnt++;
}
}
final int ret = deserializer.getDelta(out, outPosition, indexes, cnt, indexOffset, limit, 0);
int ret = deserializer.getDelta(out, outPosition, indexes, cnt, indexOffset, limit, 0);
Assert.assertArrayEquals(
StringUtils.format("Deserializer (testNoncontiguousGetEveryOtherValue, numBits = %d)", numBits),
@ -367,5 +451,25 @@ public class VSizeLongSerdeTest
);
Assert.assertEquals(Math.max(0, cnt - 1), ret);
ret = deserializer.getDelta(out, outPosition, indexes, cnt, indexOffset, limit, delta);
Assert.assertArrayEquals(
StringUtils.format("Deserializer (testNoncontiguousGetEveryOtherValue, numBits = %d)", numBits),
expectedOutDeltaPlus,
out
);
Assert.assertEquals(Math.max(0, cnt - 1), ret);
ret = deserializer.getDelta(out, outPosition, indexes, cnt, indexOffset, limit, -delta);
Assert.assertArrayEquals(
StringUtils.format("Deserializer (testNoncontiguousGetEveryOtherValue, numBits = %d)", numBits),
expectedOutDeltaMinus,
out
);
Assert.assertEquals(Math.max(0, cnt - 1), ret);
}
}

View File

@ -375,15 +375,10 @@ public abstract class BaseFilterTest extends InitializedNullHandlingTest
.create()
.schema(DEFAULT_INDEX_SCHEMA)
.indexSpec(
new IndexSpec(
bitmapSerdeFactoryEntry.getValue(),
null,
encodingStrategy,
null,
null,
null,
null
)
IndexSpec.builder()
.withBitmapSerdeFactory(bitmapSerdeFactoryEntry.getValue())
.withStringDictionaryEncoding(encodingStrategy)
.build()
)
.segmentWriteOutMediumFactory(segmentWriteOutMediumFactoryEntry.getValue());
constructors.add(new Object[]{testName, indexBuilder, finisherEntry.getValue(), cnf, optimize});

View File

@ -103,7 +103,7 @@ public class SpatialFilterBonusTest
for (SegmentWriteOutMediumFactory segmentWriteOutMediumFactory : SegmentWriteOutMediumFactory.builtInFactories()) {
IndexMerger indexMerger = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory);
IndexIO indexIO = TestHelper.getTestIndexIO();
final IndexSpec indexSpec = new IndexSpec();
final IndexSpec indexSpec = IndexSpec.DEFAULT;
final IncrementalIndex rtIndex = makeIncrementalIndex();
final QueryableIndex mMappedTestIndex = makeQueryableIndex(indexSpec, indexMerger, indexIO);
final QueryableIndex mergedRealtimeIndex = makeMergedQueryableIndex(indexSpec, indexMerger, indexIO);

View File

@ -96,16 +96,11 @@ public class SpatialFilterTest extends InitializedNullHandlingTest
@Parameterized.Parameters
public static Collection<?> constructorFeeder() throws IOException
{
final IndexSpec indexSpec = new IndexSpec();
final IndexSpec frontCodedIndexSpec = new IndexSpec(
null,
null,
new StringEncodingStrategy.FrontCoded(4, FrontCodedIndexed.V1),
null,
null,
null,
null
);
final IndexSpec indexSpec = IndexSpec.DEFAULT;
final IndexSpec frontCodedIndexSpec =
IndexSpec.builder()
.withStringDictionaryEncoding(new StringEncodingStrategy.FrontCoded(4, FrontCodedIndexed.V1))
.build();
final IncrementalIndex rtIndex = makeIncrementalIndex();
final QueryableIndex mMappedTestIndex = makeQueryableIndex(indexSpec);
final QueryableIndex mergedRealtimeIndex = makeMergedQueryableIndex(indexSpec);

View File

@ -112,7 +112,7 @@ public class SegmentGenerator implements Closeable
final int numRows
)
{
return generate(dataSegment, schemaInfo, schemaInfo.getDimensionsSpec(), TransformSpec.NONE, new IndexSpec(), granularity, numRows);
return generate(dataSegment, schemaInfo, schemaInfo.getDimensionsSpec(), TransformSpec.NONE, IndexSpec.DEFAULT, granularity, numRows);
}
public QueryableIndex generate(

View File

@ -47,12 +47,13 @@ import java.util.function.Function;
@RunWith(Parameterized.class)
public class IncrementalIndexAdapterTest extends InitializedNullHandlingTest
{
private static final IndexSpec INDEX_SPEC = new IndexSpec(
new ConciseBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZ4,
CompressionFactory.LongEncodingStrategy.LONGS
);
private static final IndexSpec INDEX_SPEC =
IndexSpec.builder()
.withBitmapSerdeFactory(new ConciseBitmapSerdeFactory())
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZ4)
.withLongEncoding(CompressionFactory.LongEncodingStrategy.LONGS)
.build();
public final IncrementalIndexCreator indexCreator;

View File

@ -120,7 +120,7 @@ public class BroadcastSegmentIndexedTableTest extends InitializedNullHandlingTes
data,
testInterval,
segment,
new IndexSpec(),
IndexSpec.DEFAULT,
null
);
File factoryJson = new File(persisted, "factory.json");

View File

@ -91,13 +91,7 @@ public class BroadcastJoinableMMappedQueryableSegmentizerFactoryTest extends Ini
data,
testInterval,
segment,
new IndexSpec(
null,
null,
null,
null,
expectedFactory
),
IndexSpec.builder().withSegmentLoader(expectedFactory).build(),
null
);

View File

@ -195,7 +195,7 @@ public class NestedDataColumnSupplierTest extends InitializedNullHandlingTest
try (final FileSmoosher smoosher = new FileSmoosher(tmpFile)) {
NestedDataColumnSerializer serializer = new NestedDataColumnSerializer(
fileNameBase,
new IndexSpec(),
IndexSpec.DEFAULT,
writeOutMediumFactory.makeSegmentWriteOutMedium(tempFolder.newFolder()),
closer
);

View File

@ -167,7 +167,7 @@ public class NestedDataColumnSupplierV4Test extends InitializedNullHandlingTest
try (final FileSmoosher smoosher = new FileSmoosher(tmpFile)) {
NestedDataColumnSerializerV4 serializer = new NestedDataColumnSerializerV4(
fileNameBase,
new IndexSpec(),
IndexSpec.DEFAULT,
writeOutMediumFactory.makeSegmentWriteOutMedium(tempFolder.newFolder()),
closer
);

View File

@ -338,7 +338,7 @@ public class NestedFieldColumnSelectorsTest
NestedDataTestUtils.COUNT,
Granularities.NONE,
true,
new IndexSpec()
IndexSpec.DEFAULT
);
Assert.assertEquals(1, segments.size());
StorageAdapter storageAdapter = segments.get(0).asStorageAdapter();
@ -369,7 +369,7 @@ public class NestedFieldColumnSelectorsTest
NestedDataTestUtils.COUNT,
Granularities.NONE,
true,
new IndexSpec()
IndexSpec.DEFAULT
);
Assert.assertEquals(1, segments.size());
StorageAdapter storageAdapter = segments.get(0).asStorageAdapter();

View File

@ -122,7 +122,7 @@ public class ScalarDoubleColumnSupplierTest extends InitializedNullHandlingTest
try (final FileSmoosher smoosher = new FileSmoosher(tmpFile)) {
ScalarDoubleColumnSerializer serializer = new ScalarDoubleColumnSerializer(
fileNameBase,
new IndexSpec(),
IndexSpec.DEFAULT,
writeOutMediumFactory.makeSegmentWriteOutMedium(tempFolder.newFolder()),
closer
);

View File

@ -122,7 +122,7 @@ public class ScalarLongColumnSupplierTest extends InitializedNullHandlingTest
try (final FileSmoosher smoosher = new FileSmoosher(tmpFile)) {
ScalarLongColumnSerializer serializer = new ScalarLongColumnSerializer(
fileNameBase,
new IndexSpec(),
IndexSpec.DEFAULT,
writeOutMediumFactory.makeSegmentWriteOutMedium(tempFolder.newFolder()),
closer
);

View File

@ -125,7 +125,7 @@ public class ScalarStringColumnSupplierTest extends InitializedNullHandlingTest
try (final FileSmoosher smoosher = new FileSmoosher(tmpFile)) {
ScalarStringColumnSerializer serializer = new ScalarStringColumnSerializer(
fileNameBase,
new IndexSpec(),
IndexSpec.DEFAULT,
writeOutMediumFactory.makeSegmentWriteOutMedium(tempFolder.newFolder()),
closer
);

View File

@ -119,7 +119,7 @@ public class VariantArrayColumnSupplierTest extends InitializedNullHandlingTest
try (final FileSmoosher smoosher = new FileSmoosher(tmpFile)) {
VariantArrayColumnSerializer serializer = new VariantArrayColumnSerializer(
fileNameBase,
new IndexSpec(),
IndexSpec.DEFAULT,
writeOutMediumFactory.makeSegmentWriteOutMedium(tempFolder.newFolder()),
closer
);

View File

@ -32,10 +32,14 @@ import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.Cursor;
import org.apache.druid.segment.DeprecatedQueryableIndexColumnSelector;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.StringEncodingStrategy;
import org.apache.druid.segment.data.CompressionFactory;
import org.apache.druid.segment.data.FrontCodedIndexed;
import org.apache.druid.segment.generator.GeneratorBasicSchemas;
import org.apache.druid.segment.generator.GeneratorSchemaInfo;
import org.apache.druid.segment.generator.SegmentGenerator;
@ -46,6 +50,7 @@ import org.apache.druid.segment.vector.VectorValueSelector;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
@ -58,7 +63,6 @@ import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
@RunWith(Parameterized.class)
public class ExpressionVectorSelectorsTest extends InitializedNullHandlingTest
@ -98,6 +102,7 @@ public class ExpressionVectorSelectorsTest extends InitializedNullHandlingTest
private static final int ROWS_PER_SEGMENT = 10_000;
private static QueryableIndex INDEX;
private static QueryableIndex INDEX_OTHER_ENCODINGS;
private static Closer CLOSER;
@BeforeClass
@ -115,10 +120,33 @@ public class ExpressionVectorSelectorsTest extends InitializedNullHandlingTest
.size(0)
.build();
final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator());
INDEX = CLOSER.register(
segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, ROWS_PER_SEGMENT)
);
final SegmentGenerator otherGenerator = CLOSER.register(new SegmentGenerator());
final DataSegment otherSegment = DataSegment.builder()
.dataSource("foo")
.interval(schemaInfo.getDataInterval())
.version("2")
.shardSpec(new LinearShardSpec(0))
.size(0)
.build();
IndexSpec otherEncodings = IndexSpec.builder()
.withStringDictionaryEncoding(
new StringEncodingStrategy.FrontCoded(16, FrontCodedIndexed.V1)
)
.withLongEncoding(CompressionFactory.LongEncodingStrategy.AUTO)
.build();
INDEX_OTHER_ENCODINGS = CLOSER.register(
otherGenerator.generate(otherSegment, schemaInfo, otherEncodings, Granularities.HOUR, ROWS_PER_SEGMENT)
);
}
@AfterClass
@ -127,34 +155,57 @@ public class ExpressionVectorSelectorsTest extends InitializedNullHandlingTest
CLOSER.close();
}
@Parameterized.Parameters(name = "expression = {0}")
@Parameterized.Parameters(name = "expression = {0}, encoding = {1}")
public static Iterable<?> constructorFeeder()
{
return EXPRESSIONS.stream().map(x -> new Object[]{x}).collect(Collectors.toList());
List<Object[]> params = new ArrayList<>();
for (String encoding : new String[]{"default", "front-coded-and-auto-longs"}) {
for (String expression : EXPRESSIONS) {
params.add(new Object[]{expression, encoding});
}
}
return params;
}
private String encoding;
private ExpressionType outputType;
private String expression;
public ExpressionVectorSelectorsTest(String expression)
private QueryableIndex queryableIndexToUse;
private Closer perTestCloser = Closer.create();
public ExpressionVectorSelectorsTest(String expression, String encoding)
{
this.expression = expression;
this.encoding = encoding;
if ("front-coded-and-auto-longs".equals(encoding)) {
this.queryableIndexToUse = INDEX_OTHER_ENCODINGS;
} else {
this.queryableIndexToUse = INDEX;
}
}
@Before
public void setup()
{
Expr parsed = Parser.parse(expression, ExprMacroTable.nil());
outputType = parsed.getOutputType(new DeprecatedQueryableIndexColumnSelector(INDEX));
outputType = parsed.getOutputType(new DeprecatedQueryableIndexColumnSelector(queryableIndexToUse));
if (outputType == null) {
outputType = ExpressionType.STRING;
}
}
@After
public void teardown() throws IOException
{
perTestCloser.close();
}
@Test
public void sanityTestVectorizedExpressionSelector()
{
sanityTestVectorizedExpressionSelectors(expression, outputType, INDEX, CLOSER, ROWS_PER_SEGMENT);
sanityTestVectorizedExpressionSelectors(expression, outputType, queryableIndexToUse, perTestCloser, ROWS_PER_SEGMENT);
}
public static void sanityTestVectorizedExpressionSelectors(

View File

@ -49,7 +49,7 @@ public class RealtimeTuningConfig implements AppenderatorConfig
private static final RejectionPolicyFactory DEFAULT_REJECTION_POLICY_FACTORY = new ServerTimeRejectionPolicyFactory();
private static final int DEFAULT_MAX_PENDING_PERSISTS = 0;
private static final ShardSpec DEFAULT_SHARD_SPEC = new NumberedShardSpec(0, 1);
private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec();
private static final IndexSpec DEFAULT_INDEX_SPEC = IndexSpec.DEFAULT;
private static final Boolean DEFAULT_REPORT_PARSE_EXCEPTIONS = Boolean.FALSE;
private static final long DEFAULT_HANDOFF_CONDITION_TIMEOUT = 0;
private static final long DEFAULT_ALERT_TIMEOUT = 0;

View File

@ -374,7 +374,7 @@ public class NewestSegmentFirstIterator implements CompactionSegmentIterator
final IndexSpec segmentIndexSpec = objectMapper.convertValue(lastCompactionState.getIndexSpec(), IndexSpec.class);
final IndexSpec configuredIndexSpec;
if (tuningConfig.getIndexSpec() == null) {
configuredIndexSpec = new IndexSpec();
configuredIndexSpec = IndexSpec.DEFAULT;
} else {
configuredIndexSpec = tuningConfig.getIndexSpec();
}

View File

@ -21,6 +21,8 @@ package org.apache.druid.indexing;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.client.indexing.ClientCompactionTaskQueryTuningConfig;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.CompressionStrategy;
import org.junit.Test;
public class ClientCompactQueryTuningConfigTest
@ -29,6 +31,12 @@ public class ClientCompactQueryTuningConfigTest
public void testEqualsContract()
{
// If this test failed, make sure to validate that toString was also updated correctly!
EqualsVerifier.forClass(ClientCompactionTaskQueryTuningConfig.class).usingGetClass().verify();
EqualsVerifier.forClass(ClientCompactionTaskQueryTuningConfig.class)
.withPrefabValues(
IndexSpec.class,
IndexSpec.DEFAULT,
IndexSpec.builder().withDimensionCompression(CompressionStrategy.ZSTD).build()
).usingGetClass()
.verify();
}
}

View File

@ -84,8 +84,8 @@ public class RealtimeTuningConfigTest
Assert.assertEquals(new OnheapIncrementalIndex.Spec(), config.getAppendableIndexSpec());
Assert.assertEquals(0, config.getHandoffConditionTimeout());
Assert.assertEquals(0, config.getAlertTimeout());
Assert.assertEquals(new IndexSpec(), config.getIndexSpec());
Assert.assertEquals(new IndexSpec(), config.getIndexSpecForIntermediatePersists());
Assert.assertEquals(IndexSpec.DEFAULT, config.getIndexSpec());
Assert.assertEquals(IndexSpec.DEFAULT, config.getIndexSpecForIntermediatePersists());
Assert.assertEquals(new Period("PT10M"), config.getIntermediatePersistPeriod());
Assert.assertEquals(new NumberedShardSpec(0, 1), config.getShardSpec());
Assert.assertEquals(0, config.getMaxPendingPersists());
@ -137,9 +137,12 @@ public class RealtimeTuningConfigTest
Assert.assertEquals(100, config.getPersistThreadPriority());
Assert.assertEquals(new Period("PT1H"), config.getWindowPeriod());
Assert.assertEquals(true, config.isReportParseExceptions());
Assert.assertEquals(new IndexSpec(null, null, CompressionStrategy.NONE, null), config.getIndexSpec());
Assert.assertEquals(
new IndexSpec(null, CompressionStrategy.UNCOMPRESSED, null, null),
IndexSpec.builder().withMetricCompression(CompressionStrategy.NONE).build(),
config.getIndexSpec()
);
Assert.assertEquals(
IndexSpec.builder().withDimensionCompression(CompressionStrategy.UNCOMPRESSED).build(),
config.getIndexSpecForIntermediatePersists()
);

View File

@ -173,7 +173,7 @@ public class ClosedSegmensSinksBatchAppenderatorTester implements AutoCloseable
maxRowsInMemory,
maxSizeInBytes == 0L ? getDefaultMaxBytesInMemory() : maxSizeInBytes,
skipBytesInMemoryOverheadCheck,
new IndexSpec(),
IndexSpec.DEFAULT,
0,
false,
0L,

View File

@ -144,7 +144,7 @@ public class OpenAndClosedSegmentsAppenderatorTester implements AutoCloseable
maxRowsInMemory,
maxSizeInBytes == 0L ? getDefaultMaxBytesInMemory() : maxSizeInBytes,
skipBytesInMemoryOverheadCheck,
new IndexSpec(),
IndexSpec.DEFAULT,
0,
false,
0L,

View File

@ -226,7 +226,7 @@ public class IngestSegmentFirehoseTest
for (String line : rows) {
index.add(parser.parse(line));
}
indexMerger.persist(index, segmentDir, new IndexSpec(), null);
indexMerger.persist(index, segmentDir, IndexSpec.DEFAULT, null);
}
}
}

View File

@ -348,13 +348,7 @@ public class SegmentManagerBroadcastJoinIndexedTableTest extends InitializedNull
data,
Intervals.of(interval),
segmentDir,
new IndexSpec(
null,
null,
null,
null,
factory
),
IndexSpec.builder().withSegmentLoader(factory).build(),
null
);
final File factoryJson = new File(segmentDir, "factory.json");

View File

@ -33,7 +33,6 @@ import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.BitmapSerde.DefaultBitmapSerdeFactory;
import org.apache.druid.segment.data.CompressionFactory.LongEncodingStrategy;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;
@ -223,18 +222,16 @@ public class DataSourceCompactionConfigTest extends InitializedNullHandlingTest
null,
new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), null),
new DynamicPartitionsSpec(1000, 20000L),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.UNCOMPRESSED,
LongEncodingStrategy.AUTO
),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZF)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.UNCOMPRESSED)
.withLongEncoding(LongEncodingStrategy.AUTO)
.build(),
2,
1000L,
TmpFileSegmentWriteOutMediumFactory.instance(),
@ -264,18 +261,16 @@ public class DataSourceCompactionConfigTest extends InitializedNullHandlingTest
null,
new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), null),
new DynamicPartitionsSpec(1000, 20000L),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.UNCOMPRESSED,
LongEncodingStrategy.AUTO
),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZ4)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.UNCOMPRESSED)
.withLongEncoding(LongEncodingStrategy.AUTO)
.build(),
2,
1000L,
TmpFileSegmentWriteOutMediumFactory.instance(),

View File

@ -25,7 +25,6 @@ import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.HumanReadableBytes;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.data.BitmapSerde.DefaultBitmapSerdeFactory;
import org.apache.druid.segment.data.CompressionFactory.LongEncodingStrategy;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;
@ -83,18 +82,16 @@ public class UserCompactionTaskQueryTuningConfigTest
null,
new SegmentsSplitHintSpec(new HumanReadableBytes(42L), null),
new DynamicPartitionsSpec(1000, 20000L),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.LZF,
LongEncodingStrategy.LONGS
),
new IndexSpec(
new DefaultBitmapSerdeFactory(),
CompressionStrategy.LZ4,
CompressionStrategy.UNCOMPRESSED,
LongEncodingStrategy.AUTO
),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZ4)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
IndexSpec.builder()
.withDimensionCompression(CompressionStrategy.LZ4)
.withMetricCompression(CompressionStrategy.LZ4)
.withLongEncoding(LongEncodingStrategy.LONGS)
.build(),
2,
1000L,
TmpFileSegmentWriteOutMediumFactory.instance(),

View File

@ -699,7 +699,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsNothingAsSegmentsWasCompactedAndHaveSameSegmentGranularityAndSameTimezone()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -732,7 +732,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsNothingAsSegmentsWasCompactedAndHaveSameSegmentGranularityInLastCompactionState()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -765,7 +765,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentSegmentGranularity()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -808,7 +808,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentSegmentGranularityInLastCompactionState()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -851,7 +851,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentTimezone()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -903,7 +903,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentOrigin()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -954,7 +954,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentRollup()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -1014,7 +1014,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentQueryGranularity()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -1074,7 +1074,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentDimensions()
{
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -1173,7 +1173,7 @@ public class NewestSegmentFirstPolicyTest
{
NullHandling.initializeForTests();
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -1297,7 +1297,7 @@ public class NewestSegmentFirstPolicyTest
.addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
);
// Same indexSpec as what is set in the auto compaction config
Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
Map<String, Object> indexSpec = IndexSpec.DEFAULT.asMap(mapper);
// Same partitionsSpec as what is set in the auto compaction config
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -1446,7 +1446,7 @@ public class NewestSegmentFirstPolicyTest
public void testIteratorReturnsSegmentsAsCompactionStateChangedWithCompactedStateHasSameSegmentGranularity()
{
// Different indexSpec as what is set in the auto compaction config
IndexSpec newIndexSpec = new IndexSpec(new ConciseBitmapSerdeFactory(), null, null, null);
IndexSpec newIndexSpec = IndexSpec.builder().withBitmapSerdeFactory(new ConciseBitmapSerdeFactory()).build();
Map<String, Object> newIndexSpecMap = mapper.convertValue(newIndexSpec, new TypeReference<Map<String, Object>>() {});
PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(ClientCompactionTaskQueryTuningConfig.from(null, null, null));
@ -1508,7 +1508,7 @@ public class NewestSegmentFirstPolicyTest
null,
null,
null,
mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {}),
IndexSpec.DEFAULT.asMap(mapper),
null
)
)
@ -1528,7 +1528,7 @@ public class NewestSegmentFirstPolicyTest
1000L,
null,
partitionsSpec,
new IndexSpec(),
IndexSpec.DEFAULT,
null,
null,
null,
@ -1563,7 +1563,7 @@ public class NewestSegmentFirstPolicyTest
1000L,
null,
partitionsSpec,
new IndexSpec(),
IndexSpec.DEFAULT,
null,
null,
null,

View File

@ -161,7 +161,7 @@ public class DruidJsonValidatorTest
null,
null,
new DynamicPartitionsSpec(10000, null),
new IndexSpec(),
IndexSpec.DEFAULT,
null,
3,
false,