Updates default inputSegmentSizeBytes in Compaction config (#12534)
Fixes the "Cannot serialize BigInt value as JSON" error while loading the compaction config in the web console.
parent 215b90d1a4
commit c877d8a981
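The underlying issue: Long.MAX_VALUE (2^63 - 1) is far above JavaScript's Number.MAX_SAFE_INTEGER (2^53 - 1), so the web console has to represent the old default as a BigInt, which its JSON serialization then rejects. A minimal sketch, not part of this commit (class and variable names are illustrative only), comparing both defaults against that limit:

// Sketch: why the old default overflowed JSON-based clients such as the web console.
// JavaScript numbers are IEEE-754 doubles, so integers above 2^53 - 1 lose precision
// and push clients into BigInt handling, which plain JSON serialization rejects.
public class InputSegmentSizeDefaultSketch
{
  private static final long MAX_SAFE_JS_INTEGER = (1L << 53) - 1; // 9007199254740991

  public static void main(String[] args)
  {
    long oldDefault = Long.MAX_VALUE;       // 9223372036854775807
    long newDefault = 100_000_000_000_000L; // approx. 100TB

    System.out.println(oldDefault > MAX_SAFE_JS_INTEGER); // true: cannot round-trip as a plain JS number
    System.out.println(newDefault > MAX_SAFE_JS_INTEGER); // false: safe for the console and other JSON clients
  }
}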
@@ -34,7 +34,8 @@ public class DataSourceCompactionConfig
 {
   /** Must be synced with Tasks.DEFAULT_MERGE_TASK_PRIORITY */
   public static final int DEFAULT_COMPACTION_TASK_PRIORITY = 25;
-  private static final long DEFAULT_INPUT_SEGMENT_SIZE_BYTES = Long.MAX_VALUE;
+  // Approx. 100TB. Chosen instead of Long.MAX_VALUE to avoid overflow on web-console and other clients
+  private static final long DEFAULT_INPUT_SEGMENT_SIZE_BYTES = 100_000_000_000_000L;
   private static final Period DEFAULT_SKIP_OFFSET_FROM_LATEST = new Period("P1D");

   private final String dataSource;
@@ -61,7 +61,7 @@ public class DataSourceCompactionConfigTest extends InitializedNullHandlingTest
     final DataSourceCompactionConfig config = new DataSourceCompactionConfig(
         "dataSource",
         null,
-        500L,
+        null,
         null,
         new Period(3600),
         null,
@@ -77,7 +77,7 @@ public class DataSourceCompactionConfigTest extends InitializedNullHandlingTest

     Assert.assertEquals(config.getDataSource(), fromJson.getDataSource());
     Assert.assertEquals(25, fromJson.getTaskPriority());
-    Assert.assertEquals(config.getInputSegmentSizeBytes(), fromJson.getInputSegmentSizeBytes());
+    Assert.assertEquals(100_000_000_000_000L, fromJson.getInputSegmentSizeBytes());
     Assert.assertEquals(config.getMaxRowsPerSegment(), fromJson.getMaxRowsPerSegment());
     Assert.assertEquals(config.getSkipOffsetFromLatest(), fromJson.getSkipOffsetFromLatest());
     Assert.assertEquals(config.getTuningConfig(), fromJson.getTuningConfig());
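The test change exercises the null-means-default behavior: the config is now built with null instead of 500L for inputSegmentSizeBytes, and a JSON round trip is expected to surface the new 100TB default. A minimal self-contained sketch of that pattern, using a hypothetical ConfigSketch class rather than Druid's actual DataSourceCompactionConfig:

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical stand-in for the null-means-default pattern used by the real config class.
public class ConfigSketch
{
  private static final long DEFAULT_INPUT_SEGMENT_SIZE_BYTES = 100_000_000_000_000L;

  private final long inputSegmentSizeBytes;

  @JsonCreator
  public ConfigSketch(@JsonProperty("inputSegmentSizeBytes") Long inputSegmentSizeBytes)
  {
    // A missing or null field falls back to the default instead of Long.MAX_VALUE.
    this.inputSegmentSizeBytes = inputSegmentSizeBytes == null
                                 ? DEFAULT_INPUT_SEGMENT_SIZE_BYTES
                                 : inputSegmentSizeBytes;
  }

  @JsonProperty
  public long getInputSegmentSizeBytes()
  {
    return inputSegmentSizeBytes;
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    ConfigSketch fromJson = mapper.readValue("{}", ConfigSketch.class);
    System.out.println(fromJson.getInputSegmentSizeBytes()); // 100000000000000
  }
}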