decouple column serializer compression closers from SegmentWriteOutMedium to optionally allow serializers to release direct memory allocated for compression earlier than when segment is completed (#16076)

This commit is contained in:
Clint Wylie 2024-03-11 12:28:04 -07:00 committed by GitHub
parent 8084f2206b
commit 313da98879
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
39 changed files with 224 additions and 117 deletions

View File

@ -240,7 +240,8 @@ public class BaseColumnarLongsBenchmark
"lz4-longs", "lz4-longs",
ByteOrder.LITTLE_ENDIAN, ByteOrder.LITTLE_ENDIAN,
CompressionFactory.LongEncodingStrategy.LONGS, CompressionFactory.LongEncodingStrategy.LONGS,
CompressionStrategy.LZ4 CompressionStrategy.LZ4,
writeOutMedium.getCloser()
); );
break; break;
case "lz4-auto": case "lz4-auto":
@ -250,7 +251,8 @@ public class BaseColumnarLongsBenchmark
"lz4-auto", "lz4-auto",
ByteOrder.LITTLE_ENDIAN, ByteOrder.LITTLE_ENDIAN,
CompressionFactory.LongEncodingStrategy.AUTO, CompressionFactory.LongEncodingStrategy.AUTO,
CompressionStrategy.LZ4 CompressionStrategy.LZ4,
writeOutMedium.getCloser()
); );
break; break;
case "none-longs": case "none-longs":
@ -260,7 +262,8 @@ public class BaseColumnarLongsBenchmark
"none-longs", "none-longs",
ByteOrder.LITTLE_ENDIAN, ByteOrder.LITTLE_ENDIAN,
CompressionFactory.LongEncodingStrategy.LONGS, CompressionFactory.LongEncodingStrategy.LONGS,
CompressionStrategy.NONE CompressionStrategy.NONE,
writeOutMedium.getCloser()
); );
break; break;
case "none-auto": case "none-auto":
@ -270,7 +273,8 @@ public class BaseColumnarLongsBenchmark
"none-auto", "none-auto",
ByteOrder.LITTLE_ENDIAN, ByteOrder.LITTLE_ENDIAN,
CompressionFactory.LongEncodingStrategy.AUTO, CompressionFactory.LongEncodingStrategy.AUTO,
CompressionStrategy.NONE CompressionStrategy.NONE,
writeOutMedium.getCloser()
); );
break; break;
case "zstd-longs": case "zstd-longs":
@ -280,7 +284,8 @@ public class BaseColumnarLongsBenchmark
"zstd-longs", "zstd-longs",
ByteOrder.LITTLE_ENDIAN, ByteOrder.LITTLE_ENDIAN,
CompressionFactory.LongEncodingStrategy.LONGS, CompressionFactory.LongEncodingStrategy.LONGS,
CompressionStrategy.ZSTD CompressionStrategy.ZSTD,
writeOutMedium.getCloser()
); );
break; break;
case "zstd-auto": case "zstd-auto":
@ -290,7 +295,8 @@ public class BaseColumnarLongsBenchmark
"zstd-auto", "zstd-auto",
ByteOrder.LITTLE_ENDIAN, ByteOrder.LITTLE_ENDIAN,
CompressionFactory.LongEncodingStrategy.AUTO, CompressionFactory.LongEncodingStrategy.AUTO,
CompressionStrategy.ZSTD CompressionStrategy.ZSTD,
writeOutMedium.getCloser()
); );
break; break;
default: default:

View File

@ -29,6 +29,7 @@ import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.generator.ColumnValueGenerator; import org.apache.druid.segment.generator.ColumnValueGenerator;
import org.apache.druid.segment.generator.GeneratorColumnSchema; import org.apache.druid.segment.generator.GeneratorColumnSchema;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
@ -155,12 +156,14 @@ public class FloatCompressionBenchmarkFileGenerator
compFile.delete(); compFile.delete();
File dataFile = new File(dir, entry.getKey()); File dataFile = new File(dir, entry.getKey());
SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium();
ColumnarFloatsSerializer writer = CompressionFactory.getFloatSerializer( ColumnarFloatsSerializer writer = CompressionFactory.getFloatSerializer(
"float-benchmark", "float-benchmark",
new OffHeapMemorySegmentWriteOutMedium(), segmentWriteOutMedium,
"float", "float",
ByteOrder.nativeOrder(), ByteOrder.nativeOrder(),
compression compression,
segmentWriteOutMedium.getCloser()
); );
try ( try (
BufferedReader br = Files.newBufferedReader(dataFile.toPath(), StandardCharsets.UTF_8); BufferedReader br = Files.newBufferedReader(dataFile.toPath(), StandardCharsets.UTF_8);

View File

@ -29,6 +29,7 @@ import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.generator.ColumnValueGenerator; import org.apache.druid.segment.generator.ColumnValueGenerator;
import org.apache.druid.segment.generator.GeneratorColumnSchema; import org.apache.druid.segment.generator.GeneratorColumnSchema;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
@ -148,13 +149,15 @@ public class LongCompressionBenchmarkFileGenerator
compFile.delete(); compFile.delete();
File dataFile = new File(dir, entry.getKey()); File dataFile = new File(dir, entry.getKey());
SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium();
ColumnarLongsSerializer writer = CompressionFactory.getLongSerializer( ColumnarLongsSerializer writer = CompressionFactory.getLongSerializer(
"long-benchmark", "long-benchmark",
new OffHeapMemorySegmentWriteOutMedium(), segmentWriteOutMedium,
"long", "long",
ByteOrder.nativeOrder(), ByteOrder.nativeOrder(),
encoding, encoding,
compression compression,
segmentWriteOutMedium.getCloser()
); );
try ( try (
BufferedReader br = Files.newBufferedReader(dataFile.toPath(), StandardCharsets.UTF_8); BufferedReader br = Files.newBufferedReader(dataFile.toPath(), StandardCharsets.UTF_8);

View File

@ -49,7 +49,8 @@ public class CompressedBigDecimalLongColumnSerializer implements GenericColumnSe
*/ */
public static CompressedBigDecimalLongColumnSerializer create( public static CompressedBigDecimalLongColumnSerializer create(
SegmentWriteOutMedium segmentWriteOutMedium, SegmentWriteOutMedium segmentWriteOutMedium,
String filenameBase) String filenameBase
)
{ {
return new CompressedBigDecimalLongColumnSerializer( return new CompressedBigDecimalLongColumnSerializer(
CompressedVSizeColumnarIntsSerializer.create( CompressedVSizeColumnarIntsSerializer.create(
@ -57,13 +58,17 @@ public class CompressedBigDecimalLongColumnSerializer implements GenericColumnSe
segmentWriteOutMedium, segmentWriteOutMedium,
String.format(Locale.ROOT, "%s.scale", filenameBase), String.format(Locale.ROOT, "%s.scale", filenameBase),
16, 16,
CompressionStrategy.LZ4), CompressionStrategy.LZ4,
segmentWriteOutMedium.getCloser()
),
V3CompressedVSizeColumnarMultiIntsSerializer.create( V3CompressedVSizeColumnarMultiIntsSerializer.create(
"dummy", "dummy",
segmentWriteOutMedium, segmentWriteOutMedium,
String.format(Locale.ROOT, "%s.magnitude", filenameBase), String.format(Locale.ROOT, "%s.magnitude", filenameBase),
Integer.MAX_VALUE, Integer.MAX_VALUE,
CompressionStrategy.LZ4)); CompressionStrategy.LZ4
)
);
} }
private final CompressedVSizeColumnarIntsSerializer scaleWriter; private final CompressedVSizeColumnarIntsSerializer scaleWriter;

View File

@ -423,7 +423,8 @@ public abstract class DictionaryEncodedColumnMerger<T extends Comparable<T>> imp
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
cardinality, cardinality,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
} else { } else {
encodedValueSerializer = new VSizeColumnarIntsSerializer(segmentWriteOutMedium, cardinality); encodedValueSerializer = new VSizeColumnarIntsSerializer(segmentWriteOutMedium, cardinality);

View File

@ -72,7 +72,8 @@ public class DoubleColumnSerializer implements GenericColumnSerializer<Object>
segmentWriteOutMedium, segmentWriteOutMedium,
StringUtils.format("%s.double_column", filenameBase), StringUtils.format("%s.double_column", filenameBase),
byteOrder, byteOrder,
compression compression,
segmentWriteOutMedium.getCloser()
); );
writer.open(); writer.open();
} }

View File

@ -99,7 +99,8 @@ public class DoubleColumnSerializerV2 implements GenericColumnSerializer<Object>
segmentWriteOutMedium, segmentWriteOutMedium,
StringUtils.format("%s.double_column", filenameBase), StringUtils.format("%s.double_column", filenameBase),
byteOrder, byteOrder,
compression compression,
segmentWriteOutMedium.getCloser()
); );
writer.open(); writer.open();
nullValueBitmapWriter = new ByteBufferWriter<>( nullValueBitmapWriter = new ByteBufferWriter<>(

View File

@ -72,7 +72,8 @@ public class FloatColumnSerializer implements GenericColumnSerializer<Object>
segmentWriteOutMedium, segmentWriteOutMedium,
StringUtils.format("%s.float_column", filenameBase), StringUtils.format("%s.float_column", filenameBase),
byteOrder, byteOrder,
compression compression,
segmentWriteOutMedium.getCloser()
); );
writer.open(); writer.open();
} }

View File

@ -99,7 +99,8 @@ public class FloatColumnSerializerV2 implements GenericColumnSerializer<Object>
segmentWriteOutMedium, segmentWriteOutMedium,
StringUtils.format("%s.float_column", filenameBase), StringUtils.format("%s.float_column", filenameBase),
byteOrder, byteOrder,
compression compression,
segmentWriteOutMedium.getCloser()
); );
writer.open(); writer.open();
nullValueBitmapWriter = new ByteBufferWriter<>( nullValueBitmapWriter = new ByteBufferWriter<>(

View File

@ -202,7 +202,6 @@ public class IndexMergerV9 implements IndexMerger
mergers.add( mergers.add(
handler.makeMerger( handler.makeMerger(
indexSpec, indexSpec,
segmentWriteOutMedium, segmentWriteOutMedium,
dimFormats.get(i).toColumnCapabilities(), dimFormats.get(i).toColumnCapabilities(),
progress, progress,

View File

@ -80,7 +80,8 @@ public class LongColumnSerializer implements GenericColumnSerializer<Object>
StringUtils.format("%s.long_column", filenameBase), StringUtils.format("%s.long_column", filenameBase),
byteOrder, byteOrder,
encoding, encoding,
compression compression,
segmentWriteOutMedium.getCloser()
); );
writer.open(); writer.open();
} }

View File

@ -105,7 +105,8 @@ public class LongColumnSerializerV2 implements GenericColumnSerializer<Object>
StringUtils.format("%s.long_column", filenameBase), StringUtils.format("%s.long_column", filenameBase),
byteOrder, byteOrder,
encoding, encoding,
compression compression,
segmentWriteOutMedium.getCloser()
); );
writer.open(); writer.open();
nullValueBitmapWriter = new ByteBufferWriter<>( nullValueBitmapWriter = new ByteBufferWriter<>(

View File

@ -56,7 +56,8 @@ public class BlockLayoutColumnarDoublesSerializer implements ColumnarDoublesSeri
SegmentWriteOutMedium segmentWriteOutMedium, SegmentWriteOutMedium segmentWriteOutMedium,
String filenameBase, String filenameBase,
ByteOrder byteOrder, ByteOrder byteOrder,
CompressionStrategy compression CompressionStrategy compression,
Closer closer
) )
{ {
this.columnName = columnName; this.columnName = columnName;
@ -64,11 +65,11 @@ public class BlockLayoutColumnarDoublesSerializer implements ColumnarDoublesSeri
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
compression, compression,
CompressedPools.BUFFER_SIZE CompressedPools.BUFFER_SIZE,
closer
); );
this.compression = compression; this.compression = compression;
CompressionStrategy.Compressor compressor = compression.getCompressor(); CompressionStrategy.Compressor compressor = compression.getCompressor();
Closer closer = segmentWriteOutMedium.getCloser();
this.endBuffer = compressor.allocateInBuffer(CompressedPools.BUFFER_SIZE, closer).order(byteOrder); this.endBuffer = compressor.allocateInBuffer(CompressedPools.BUFFER_SIZE, closer).order(byteOrder);
} }

View File

@ -56,7 +56,8 @@ public class BlockLayoutColumnarFloatsSerializer implements ColumnarFloatsSerial
SegmentWriteOutMedium segmentWriteOutMedium, SegmentWriteOutMedium segmentWriteOutMedium,
String filenameBase, String filenameBase,
ByteOrder byteOrder, ByteOrder byteOrder,
CompressionStrategy compression CompressionStrategy compression,
Closer closer
) )
{ {
this.columnName = columnName; this.columnName = columnName;
@ -64,11 +65,11 @@ public class BlockLayoutColumnarFloatsSerializer implements ColumnarFloatsSerial
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
compression, compression,
CompressedPools.BUFFER_SIZE CompressedPools.BUFFER_SIZE,
closer
); );
this.compression = compression; this.compression = compression;
CompressionStrategy.Compressor compressor = compression.getCompressor(); CompressionStrategy.Compressor compressor = compression.getCompressor();
Closer closer = segmentWriteOutMedium.getCloser();
this.endBuffer = compressor.allocateInBuffer(CompressedPools.BUFFER_SIZE, closer).order(byteOrder); this.endBuffer = compressor.allocateInBuffer(CompressedPools.BUFFER_SIZE, closer).order(byteOrder);
} }

View File

@ -19,13 +19,13 @@
package org.apache.druid.segment.data; package org.apache.druid.segment.data;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.io.smoosh.FileSmoosher; import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
import org.apache.druid.segment.CompressedPools; import org.apache.druid.segment.CompressedPools;
import org.apache.druid.segment.serde.MetaSerdeHelper; import org.apache.druid.segment.serde.MetaSerdeHelper;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium; import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
@ -59,17 +59,24 @@ public class BlockLayoutColumnarLongsSerializer implements ColumnarLongsSerializ
String filenameBase, String filenameBase,
ByteOrder byteOrder, ByteOrder byteOrder,
CompressionFactory.LongEncodingWriter writer, CompressionFactory.LongEncodingWriter writer,
CompressionStrategy compression CompressionStrategy compression,
Closer closer
) )
{ {
this.columnName = columnName; this.columnName = columnName;
this.sizePer = writer.getBlockSize(CompressedPools.BUFFER_SIZE); this.sizePer = writer.getBlockSize(CompressedPools.BUFFER_SIZE);
int bufferSize = writer.getNumBytes(sizePer); int bufferSize = writer.getNumBytes(sizePer);
this.flattener = GenericIndexedWriter.ofCompressedByteBuffers(segmentWriteOutMedium, filenameBase, compression, bufferSize); this.flattener = GenericIndexedWriter.ofCompressedByteBuffers(
segmentWriteOutMedium,
filenameBase,
compression,
bufferSize,
closer
);
this.writer = writer; this.writer = writer;
this.compression = compression; this.compression = compression;
CompressionStrategy.Compressor compressor = compression.getCompressor(); CompressionStrategy.Compressor compressor = compression.getCompressor();
endBuffer = compressor.allocateInBuffer(writer.getNumBytes(sizePer), segmentWriteOutMedium.getCloser()).order(byteOrder); endBuffer = compressor.allocateInBuffer(writer.getNumBytes(sizePer), closer).order(byteOrder);
writer.setBuffer(endBuffer); writer.setBuffer(endBuffer);
numInsertedForNextFlush = sizePer; numInsertedForNextFlush = sizePer;
} }

View File

@ -20,6 +20,7 @@
package org.apache.druid.segment.data; package org.apache.druid.segment.data;
import org.apache.druid.io.Channels; import org.apache.druid.io.Channels;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.io.smoosh.FileSmoosher; import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
import org.apache.druid.segment.CompressedPools; import org.apache.druid.segment.CompressedPools;
import org.apache.druid.segment.serde.MetaSerdeHelper; import org.apache.druid.segment.serde.MetaSerdeHelper;
@ -60,18 +61,16 @@ public class CompressedBlockSerializer implements Serializer
public CompressedBlockSerializer( public CompressedBlockSerializer(
SegmentWriteOutMedium segmentWriteOutMedium, SegmentWriteOutMedium segmentWriteOutMedium,
CompressionStrategy compression, CompressionStrategy compression,
int blockSize int blockSize,
Closer closer
) )
{ {
this.segmentWriteOutMedium = segmentWriteOutMedium; this.segmentWriteOutMedium = segmentWriteOutMedium;
this.compression = compression; this.compression = compression;
this.compressor = compression.getCompressor(); this.compressor = compression.getCompressor();
this.uncompressedDataBuffer = compressor.allocateInBuffer(blockSize, segmentWriteOutMedium.getCloser()) this.uncompressedDataBuffer = compressor.allocateInBuffer(blockSize, closer).order(ByteOrder.nativeOrder());
.order(ByteOrder.nativeOrder()); this.compressedDataBuffer = compressor.allocateOutBuffer(blockSize, closer).order(ByteOrder.nativeOrder());
this.compressedDataBuffer = compressor.allocateOutBuffer(blockSize, segmentWriteOutMedium.getCloser())
.order(ByteOrder.nativeOrder());
} }
public void open() throws IOException public void open() throws IOException

View File

@ -58,12 +58,12 @@ public class CompressedColumnarIntsSerializer extends SingleValueColumnarIntsSer
final String filenameBase, final String filenameBase,
final int chunkFactor, final int chunkFactor,
final ByteOrder byteOrder, final ByteOrder byteOrder,
final CompressionStrategy compression final CompressionStrategy compression,
final Closer closer
) )
{ {
this( this(
columnName, columnName,
segmentWriteOutMedium,
chunkFactor, chunkFactor,
byteOrder, byteOrder,
compression, compression,
@ -71,18 +71,20 @@ public class CompressedColumnarIntsSerializer extends SingleValueColumnarIntsSer
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
compression, compression,
chunkFactor * Integer.BYTES chunkFactor * Integer.BYTES,
) closer
),
closer
); );
} }
CompressedColumnarIntsSerializer( CompressedColumnarIntsSerializer(
final String columnName, final String columnName,
final SegmentWriteOutMedium segmentWriteOutMedium,
final int chunkFactor, final int chunkFactor,
final ByteOrder byteOrder, final ByteOrder byteOrder,
final CompressionStrategy compression, final CompressionStrategy compression,
final GenericIndexedWriter<ByteBuffer> flattener final GenericIndexedWriter<ByteBuffer> flattener,
final Closer closer
) )
{ {
this.columnName = columnName; this.columnName = columnName;
@ -90,7 +92,6 @@ public class CompressedColumnarIntsSerializer extends SingleValueColumnarIntsSer
this.compression = compression; this.compression = compression;
this.flattener = flattener; this.flattener = flattener;
CompressionStrategy.Compressor compressor = compression.getCompressor(); CompressionStrategy.Compressor compressor = compression.getCompressor();
Closer closer = segmentWriteOutMedium.getCloser();
this.endBuffer = compressor.allocateInBuffer(chunkFactor * Integer.BYTES, closer).order(byteOrder); this.endBuffer = compressor.allocateInBuffer(chunkFactor * Integer.BYTES, closer).order(byteOrder);
this.numInserted = 0; this.numInserted = 0;
} }

View File

@ -19,6 +19,7 @@
package org.apache.druid.segment.data; package org.apache.druid.segment.data;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.io.smoosh.FileSmoosher; import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
import org.apache.druid.segment.CompressedPools; import org.apache.druid.segment.CompressedPools;
import org.apache.druid.segment.serde.Serializer; import org.apache.druid.segment.serde.Serializer;
@ -34,12 +35,17 @@ public class CompressedLongsSerializer implements Serializer
private final CompressedBlockSerializer blockSerializer; private final CompressedBlockSerializer blockSerializer;
private final ByteBuffer longValueConverter = ByteBuffer.allocate(Long.BYTES).order(ByteOrder.nativeOrder()); private final ByteBuffer longValueConverter = ByteBuffer.allocate(Long.BYTES).order(ByteOrder.nativeOrder());
public CompressedLongsSerializer(SegmentWriteOutMedium segmentWriteOutMedium, CompressionStrategy compression) public CompressedLongsSerializer(
SegmentWriteOutMedium segmentWriteOutMedium,
CompressionStrategy compression,
Closer closer
)
{ {
this.blockSerializer = new CompressedBlockSerializer( this.blockSerializer = new CompressedBlockSerializer(
segmentWriteOutMedium, segmentWriteOutMedium,
compression, compression,
CompressedPools.BUFFER_SIZE CompressedPools.BUFFER_SIZE,
closer
); );
} }

View File

@ -20,13 +20,13 @@
package org.apache.druid.segment.data; package org.apache.druid.segment.data;
import org.apache.druid.common.utils.ByteUtils; import org.apache.druid.common.utils.ByteUtils;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.io.smoosh.FileSmoosher; import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.serde.MetaSerdeHelper; import org.apache.druid.segment.serde.MetaSerdeHelper;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium; import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
@ -51,7 +51,8 @@ public class CompressedVSizeColumnarIntsSerializer extends SingleValueColumnarIn
final SegmentWriteOutMedium segmentWriteOutMedium, final SegmentWriteOutMedium segmentWriteOutMedium,
final String filenameBase, final String filenameBase,
final int maxValue, final int maxValue,
final CompressionStrategy compression final CompressionStrategy compression,
final Closer closer
) )
{ {
return new CompressedVSizeColumnarIntsSerializer( return new CompressedVSizeColumnarIntsSerializer(
@ -61,7 +62,8 @@ public class CompressedVSizeColumnarIntsSerializer extends SingleValueColumnarIn
maxValue, maxValue,
CompressedVSizeColumnarIntsSupplier.maxIntsInBufferForValue(maxValue), CompressedVSizeColumnarIntsSupplier.maxIntsInBufferForValue(maxValue),
IndexIO.BYTE_ORDER, IndexIO.BYTE_ORDER,
compression compression,
closer
); );
} }
@ -84,12 +86,12 @@ public class CompressedVSizeColumnarIntsSerializer extends SingleValueColumnarIn
final int maxValue, final int maxValue,
final int chunkFactor, final int chunkFactor,
final ByteOrder byteOrder, final ByteOrder byteOrder,
final CompressionStrategy compression final CompressionStrategy compression,
final Closer closer
) )
{ {
this( this(
columnName, columnName,
segmentWriteOutMedium,
maxValue, maxValue,
chunkFactor, chunkFactor,
byteOrder, byteOrder,
@ -98,19 +100,21 @@ public class CompressedVSizeColumnarIntsSerializer extends SingleValueColumnarIn
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
compression, compression,
sizePer(maxValue, chunkFactor) sizePer(maxValue, chunkFactor),
) closer
),
closer
); );
} }
CompressedVSizeColumnarIntsSerializer( CompressedVSizeColumnarIntsSerializer(
final String columnName, final String columnName,
final SegmentWriteOutMedium segmentWriteOutMedium,
final int maxValue, final int maxValue,
final int chunkFactor, final int chunkFactor,
final ByteOrder byteOrder, final ByteOrder byteOrder,
final CompressionStrategy compression, final CompressionStrategy compression,
final GenericIndexedWriter<ByteBuffer> flattener final GenericIndexedWriter<ByteBuffer> flattener,
final Closer closer
) )
{ {
this.columnName = columnName; this.columnName = columnName;
@ -122,7 +126,7 @@ public class CompressedVSizeColumnarIntsSerializer extends SingleValueColumnarIn
this.flattener = flattener; this.flattener = flattener;
this.intBuffer = ByteBuffer.allocate(Integer.BYTES).order(byteOrder); this.intBuffer = ByteBuffer.allocate(Integer.BYTES).order(byteOrder);
CompressionStrategy.Compressor compressor = compression.getCompressor(); CompressionStrategy.Compressor compressor = compression.getCompressor();
this.endBuffer = compressor.allocateInBuffer(chunkBytes, segmentWriteOutMedium.getCloser()).order(byteOrder); this.endBuffer = compressor.allocateInBuffer(chunkBytes, closer).order(byteOrder);
this.numInserted = 0; this.numInserted = 0;
} }

View File

@ -66,13 +66,18 @@ public class CompressedVariableSizedBlobColumnSerializer implements Serializer
{ {
numValues = 0; numValues = 0;
currentOffset = 0; currentOffset = 0;
offsetsSerializer = new CompressedLongsSerializer(segmentWriteOutMedium, compression); offsetsSerializer = new CompressedLongsSerializer(
segmentWriteOutMedium,
compression,
segmentWriteOutMedium.getCloser()
);
offsetsSerializer.open(); offsetsSerializer.open();
valuesSerializer = new CompressedBlockSerializer( valuesSerializer = new CompressedBlockSerializer(
segmentWriteOutMedium, segmentWriteOutMedium,
compression, compression,
CompressedPools.BUFFER_SIZE CompressedPools.BUFFER_SIZE,
segmentWriteOutMedium.getCloser()
); );
valuesSerializer.open(); valuesSerializer.open();
} }

View File

@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonValue;
import com.google.common.base.Supplier; import com.google.common.base.Supplier;
import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.segment.serde.MetaSerdeHelper; import org.apache.druid.segment.serde.MetaSerdeHelper;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium; import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import org.apache.druid.segment.writeout.WriteOutBytes; import org.apache.druid.segment.writeout.WriteOutBytes;
@ -324,7 +325,8 @@ public class CompressionFactory
String filenameBase, String filenameBase,
ByteOrder order, ByteOrder order,
LongEncodingStrategy encodingStrategy, LongEncodingStrategy encodingStrategy,
CompressionStrategy compressionStrategy CompressionStrategy compressionStrategy,
Closer closer
) )
{ {
if (encodingStrategy == LongEncodingStrategy.AUTO) { if (encodingStrategy == LongEncodingStrategy.AUTO) {
@ -333,7 +335,8 @@ public class CompressionFactory
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
order, order,
compressionStrategy compressionStrategy,
closer
); );
} else if (encodingStrategy == LongEncodingStrategy.LONGS) { } else if (encodingStrategy == LongEncodingStrategy.LONGS) {
if (compressionStrategy == CompressionStrategy.NONE) { if (compressionStrategy == CompressionStrategy.NONE) {
@ -349,7 +352,8 @@ public class CompressionFactory
filenameBase, filenameBase,
order, order,
new LongsLongEncodingWriter(order), new LongsLongEncodingWriter(order),
compressionStrategy compressionStrategy,
closer
); );
} }
} else { } else {
@ -379,7 +383,8 @@ public class CompressionFactory
SegmentWriteOutMedium segmentWriteOutMedium, SegmentWriteOutMedium segmentWriteOutMedium,
String filenameBase, String filenameBase,
ByteOrder order, ByteOrder order,
CompressionStrategy compressionStrategy CompressionStrategy compressionStrategy,
Closer closer
) )
{ {
if (compressionStrategy == CompressionStrategy.NONE) { if (compressionStrategy == CompressionStrategy.NONE) {
@ -390,7 +395,8 @@ public class CompressionFactory
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
order, order,
compressionStrategy compressionStrategy,
closer
); );
} }
} }
@ -417,7 +423,8 @@ public class CompressionFactory
SegmentWriteOutMedium segmentWriteOutMedium, SegmentWriteOutMedium segmentWriteOutMedium,
String filenameBase, String filenameBase,
ByteOrder byteOrder, ByteOrder byteOrder,
CompressionStrategy compression CompressionStrategy compression,
Closer closer
) )
{ {
if (compression == CompressionStrategy.NONE) { if (compression == CompressionStrategy.NONE) {
@ -428,7 +435,8 @@ public class CompressionFactory
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
byteOrder, byteOrder,
compression compression,
closer
); );
} }
} }

View File

@ -77,13 +77,14 @@ public class GenericIndexedWriter<T> implements DictionaryWriter<T>
final SegmentWriteOutMedium segmentWriteOutMedium, final SegmentWriteOutMedium segmentWriteOutMedium,
final String filenameBase, final String filenameBase,
final CompressionStrategy compressionStrategy, final CompressionStrategy compressionStrategy,
final int bufferSize final int bufferSize,
final Closer closer
) )
{ {
GenericIndexedWriter<ByteBuffer> writer = new GenericIndexedWriter<>( GenericIndexedWriter<ByteBuffer> writer = new GenericIndexedWriter<>(
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
compressedByteBuffersWriteObjectStrategy(compressionStrategy, bufferSize, segmentWriteOutMedium.getCloser()) compressedByteBuffersWriteObjectStrategy(compressionStrategy, bufferSize, closer)
); );
writer.objectsSorted = false; writer.objectsSorted = false;
return writer; return writer;

View File

@ -24,11 +24,11 @@ import it.unimi.dsi.fastutil.longs.Long2IntMap;
import it.unimi.dsi.fastutil.longs.Long2IntOpenHashMap; import it.unimi.dsi.fastutil.longs.Long2IntOpenHashMap;
import it.unimi.dsi.fastutil.longs.LongArrayList; import it.unimi.dsi.fastutil.longs.LongArrayList;
import it.unimi.dsi.fastutil.longs.LongList; import it.unimi.dsi.fastutil.longs.LongList;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.io.smoosh.FileSmoosher; import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium; import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteOrder; import java.nio.ByteOrder;
import java.nio.channels.WritableByteChannel; import java.nio.channels.WritableByteChannel;
@ -45,6 +45,7 @@ public class IntermediateColumnarLongsSerializer implements ColumnarLongsSeriali
private final String filenameBase; private final String filenameBase;
private final ByteOrder order; private final ByteOrder order;
private final CompressionStrategy compression; private final CompressionStrategy compression;
private final Closer closer;
private int numInserted = 0; private int numInserted = 0;
@ -64,7 +65,8 @@ public class IntermediateColumnarLongsSerializer implements ColumnarLongsSeriali
SegmentWriteOutMedium segmentWriteOutMedium, SegmentWriteOutMedium segmentWriteOutMedium,
String filenameBase, String filenameBase,
ByteOrder order, ByteOrder order,
CompressionStrategy compression CompressionStrategy compression,
Closer closer
) )
{ {
this.columnName = columnName; this.columnName = columnName;
@ -72,6 +74,7 @@ public class IntermediateColumnarLongsSerializer implements ColumnarLongsSeriali
this.filenameBase = filenameBase; this.filenameBase = filenameBase;
this.order = order; this.order = order;
this.compression = compression; this.compression = compression;
this.closer = closer;
} }
@Override @Override
@ -141,7 +144,8 @@ public class IntermediateColumnarLongsSerializer implements ColumnarLongsSeriali
filenameBase, filenameBase,
order, order,
writer, writer,
compression compression,
closer
); );
} }

View File

@ -51,7 +51,8 @@ public class V3CompressedVSizeColumnarMultiIntsSerializer extends ColumnarMultiI
filenameBase, filenameBase,
CompressedColumnarIntsSupplier.MAX_INTS_IN_BUFFER, CompressedColumnarIntsSupplier.MAX_INTS_IN_BUFFER,
IndexIO.BYTE_ORDER, IndexIO.BYTE_ORDER,
compression compression,
segmentWriteOutMedium.getCloser()
), ),
new CompressedVSizeColumnarIntsSerializer( new CompressedVSizeColumnarIntsSerializer(
columnName, columnName,
@ -60,7 +61,8 @@ public class V3CompressedVSizeColumnarMultiIntsSerializer extends ColumnarMultiI
maxValue, maxValue,
CompressedVSizeColumnarIntsSupplier.maxIntsInBufferForValue(maxValue), CompressedVSizeColumnarIntsSupplier.maxIntsInBufferForValue(maxValue),
IndexIO.BYTE_ORDER, IndexIO.BYTE_ORDER,
compression compression,
segmentWriteOutMedium.getCloser()
) )
); );
} }

View File

@ -29,6 +29,7 @@ import it.unimi.dsi.fastutil.ints.IntIterator;
import org.apache.druid.collections.bitmap.ImmutableBitmap; import org.apache.druid.collections.bitmap.ImmutableBitmap;
import org.apache.druid.collections.bitmap.MutableBitmap; import org.apache.druid.collections.bitmap.MutableBitmap;
import org.apache.druid.io.Channels; import org.apache.druid.io.Channels;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.io.smoosh.FileSmoosher; import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
import org.apache.druid.java.util.common.io.smoosh.SmooshedWriter; import org.apache.druid.java.util.common.io.smoosh.SmooshedWriter;
import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.common.logger.Logger;
@ -81,6 +82,8 @@ public abstract class GlobalDictionaryEncodedFieldColumnWriter<T>
protected final Int2ObjectRBTreeMap<MutableBitmap> arrayElements = new Int2ObjectRBTreeMap<>(); protected final Int2ObjectRBTreeMap<MutableBitmap> arrayElements = new Int2ObjectRBTreeMap<>();
protected final Closer fieldResourceCloser = Closer.create();
protected FixedIndexedIntWriter intermediateValueWriter; protected FixedIndexedIntWriter intermediateValueWriter;
// maybe someday we allow no bitmap indexes or multi-value columns // maybe someday we allow no bitmap indexes or multi-value columns
protected int flags = DictionaryEncodedColumnPartSerde.NO_FLAGS; protected int flags = DictionaryEncodedColumnPartSerde.NO_FLAGS;
@ -300,6 +303,7 @@ public abstract class GlobalDictionaryEncodedFieldColumnWriter<T>
} }
finally { finally {
tmpWriteoutMedium.close(); tmpWriteoutMedium.close();
fieldResourceCloser.close();
} }
} }
@ -312,7 +316,8 @@ public abstract class GlobalDictionaryEncodedFieldColumnWriter<T>
medium, medium,
columnName, columnName,
maxId, maxId,
indexSpec.getDimensionCompression() indexSpec.getDimensionCompression(),
fieldResourceCloser
); );
} else { } else {
encodedValueSerializer = new VSizeColumnarIntsSerializer(medium, maxId); encodedValueSerializer = new VSizeColumnarIntsSerializer(medium, maxId);

View File

@ -95,7 +95,8 @@ public class ScalarDoubleColumnSerializer extends ScalarNestedCommonFormatColumn
segmentWriteOutMedium, segmentWriteOutMedium,
StringUtils.format("%s.double_column", name), StringUtils.format("%s.double_column", name),
ByteOrder.nativeOrder(), ByteOrder.nativeOrder(),
indexSpec.getDimensionCompression() indexSpec.getDimensionCompression(),
segmentWriteOutMedium.getCloser()
); );
doublesSerializer.open(); doublesSerializer.open();
} }

View File

@ -67,7 +67,8 @@ public final class ScalarDoubleFieldColumnWriter extends GlobalDictionaryEncoded
segmentWriteOutMedium, segmentWriteOutMedium,
StringUtils.format("%s.double_column", fieldName), StringUtils.format("%s.double_column", fieldName),
ByteOrder.nativeOrder(), ByteOrder.nativeOrder(),
indexSpec.getDimensionCompression() indexSpec.getDimensionCompression(),
fieldResourceCloser
); );
doublesSerializer.open(); doublesSerializer.open();
} }

View File

@ -97,7 +97,8 @@ public class ScalarLongColumnSerializer extends ScalarNestedCommonFormatColumnSe
StringUtils.format("%s.long_column", name), StringUtils.format("%s.long_column", name),
ByteOrder.nativeOrder(), ByteOrder.nativeOrder(),
indexSpec.getLongEncoding(), indexSpec.getLongEncoding(),
indexSpec.getDimensionCompression() indexSpec.getDimensionCompression(),
segmentWriteOutMedium.getCloser()
); );
longsSerializer.open(); longsSerializer.open();
} }

View File

@ -68,7 +68,8 @@ public final class ScalarLongFieldColumnWriter extends GlobalDictionaryEncodedFi
StringUtils.format("%s.long_column", fieldName), StringUtils.format("%s.long_column", fieldName),
ByteOrder.nativeOrder(), ByteOrder.nativeOrder(),
indexSpec.getLongEncoding(), indexSpec.getLongEncoding(),
indexSpec.getDimensionCompression() indexSpec.getDimensionCompression(),
fieldResourceCloser
); );
longsSerializer.open(); longsSerializer.open();
} }

View File

@ -184,7 +184,8 @@ public abstract class ScalarNestedCommonFormatColumnSerializer<T> extends Nested
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
dictionaryWriter.getCardinality(), dictionaryWriter.getCardinality(),
compressionToUse compressionToUse,
segmentWriteOutMedium.getCloser()
); );
encodedValueSerializer.open(); encodedValueSerializer.open();

View File

@ -342,7 +342,8 @@ public class VariantColumnSerializer extends NestedCommonFormatColumnSerializer
segmentWriteOutMedium, segmentWriteOutMedium,
filenameBase, filenameBase,
cardinality, cardinality,
compressionToUse compressionToUse,
segmentWriteOutMedium.getCloser()
); );
encodedValueSerializer.open(); encodedValueSerializer.open();

View File

@ -167,7 +167,8 @@ public class CompressedColumnarIntsSerializerTest
"test", "test",
CompressedColumnarIntsSupplier.MAX_INTS_IN_BUFFER, CompressedColumnarIntsSupplier.MAX_INTS_IN_BUFFER,
byteOrder, byteOrder,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -196,7 +197,8 @@ public class CompressedColumnarIntsSerializerTest
"test", "test",
chunkFactor, chunkFactor,
byteOrder, byteOrder,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
CompressedColumnarIntsSupplier supplierFromList = CompressedColumnarIntsSupplier.fromList( CompressedColumnarIntsSupplier supplierFromList = CompressedColumnarIntsSupplier.fromList(
IntArrayList.wrap(vals), IntArrayList.wrap(vals),
@ -227,6 +229,7 @@ public class CompressedColumnarIntsSerializerTest
Assert.assertEquals(vals[i], columnarInts.get(i)); Assert.assertEquals(vals[i], columnarInts.get(i));
} }
CloseableUtils.closeAndWrapExceptions(columnarInts); CloseableUtils.closeAndWrapExceptions(columnarInts);
CloseableUtils.closeAndWrapExceptions(segmentWriteOutMedium);
} }
private void checkV2SerializedSizeAndData(int chunkFactor) throws Exception private void checkV2SerializedSizeAndData(int chunkFactor) throws Exception
@ -236,7 +239,6 @@ public class CompressedColumnarIntsSerializerTest
CompressedColumnarIntsSerializer writer = new CompressedColumnarIntsSerializer( CompressedColumnarIntsSerializer writer = new CompressedColumnarIntsSerializer(
"test", "test",
segmentWriteOutMedium,
chunkFactor, chunkFactor,
byteOrder, byteOrder,
compressionStrategy, compressionStrategy,
@ -244,8 +246,10 @@ public class CompressedColumnarIntsSerializerTest
segmentWriteOutMedium, segmentWriteOutMedium,
"test", "test",
compressionStrategy, compressionStrategy,
Long.BYTES * 10000 Long.BYTES * 10000,
) segmentWriteOutMedium.getCloser()
),
segmentWriteOutMedium.getCloser()
); );
writer.open(); writer.open();

View File

@ -147,7 +147,8 @@ public class CompressedDoublesSerdeTest
segmentWriteOutMedium, segmentWriteOutMedium,
"test", "test",
order, order,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -160,12 +161,14 @@ public class CompressedDoublesSerdeTest
public void testWithValues(double[] values) throws Exception public void testWithValues(double[] values) throws Exception
{ {
final SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium();
ColumnarDoublesSerializer serializer = CompressionFactory.getDoubleSerializer( ColumnarDoublesSerializer serializer = CompressionFactory.getDoubleSerializer(
"test", "test",
new OffHeapMemorySegmentWriteOutMedium(), segmentWriteOutMedium,
"test", "test",
order, order,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -190,6 +193,9 @@ public class CompressedDoublesSerdeTest
} }
testConcurrentThreadReads(supplier, doubles, values); testConcurrentThreadReads(supplier, doubles, values);
} }
finally {
segmentWriteOutMedium.close();
}
} }
private void tryFill(ColumnarDoubles indexed, double[] vals, final int startIndex, final int size) private void tryFill(ColumnarDoubles indexed, double[] vals, final int startIndex, final int size)

View File

@ -154,7 +154,8 @@ public class CompressedFloatsSerdeTest
segmentWriteOutMedium, segmentWriteOutMedium,
"test", "test",
order, order,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -167,12 +168,14 @@ public class CompressedFloatsSerdeTest
public void testWithValues(float[] values) throws Exception public void testWithValues(float[] values) throws Exception
{ {
SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium();
ColumnarFloatsSerializer serializer = CompressionFactory.getFloatSerializer( ColumnarFloatsSerializer serializer = CompressionFactory.getFloatSerializer(
"test", "test",
new OffHeapMemorySegmentWriteOutMedium(), segmentWriteOutMedium,
"test", "test",
order, order,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();

View File

@ -21,6 +21,7 @@ package org.apache.druid.segment.data;
import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
@ -95,13 +96,15 @@ public class CompressedLongsAutoEncodingSerdeTest
public void testValues(long[] values) throws Exception public void testValues(long[] values) throws Exception
{ {
SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium();
ColumnarLongsSerializer serializer = CompressionFactory.getLongSerializer( ColumnarLongsSerializer serializer = CompressionFactory.getLongSerializer(
"test", "test",
new OffHeapMemorySegmentWriteOutMedium(), segmentWriteOutMedium,
"test", "test",
order, order,
encodingStrategy, encodingStrategy,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -119,6 +122,7 @@ public class CompressedLongsAutoEncodingSerdeTest
assertIndexMatchesVals(longs, values); assertIndexMatchesVals(longs, values);
longs.close(); longs.close();
segmentWriteOutMedium.close();
} }
private void assertIndexMatchesVals(ColumnarLongs indexed, long[] vals) private void assertIndexMatchesVals(ColumnarLongs indexed, long[] vals)

View File

@ -154,7 +154,8 @@ public class CompressedLongsSerdeTest
"test", "test",
order, order,
encodingStrategy, encodingStrategy,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -173,13 +174,15 @@ public class CompressedLongsSerdeTest
public void testValues(long[] values) throws Exception public void testValues(long[] values) throws Exception
{ {
SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium();
ColumnarLongsSerializer serializer = CompressionFactory.getLongSerializer( ColumnarLongsSerializer serializer = CompressionFactory.getLongSerializer(
"test", "test",
new OffHeapMemorySegmentWriteOutMedium(), segmentWriteOutMedium,
"test", "test",
order, order,
encodingStrategy, encodingStrategy,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -206,6 +209,9 @@ public class CompressedLongsSerdeTest
testSupplierSerde(supplier, values); testSupplierSerde(supplier, values);
testConcurrentThreadReads(supplier, longs, values); testConcurrentThreadReads(supplier, longs, values);
} }
finally {
segmentWriteOutMedium.close();
}
} }
private void tryFill(ColumnarLongs indexed, long[] vals, final int startIndex, final int size) private void tryFill(ColumnarLongs indexed, long[] vals, final int startIndex, final int size)

View File

@ -124,7 +124,8 @@ public class CompressedVSizeColumnarIntsSerializerTest
vals.length > 0 ? Ints.max(vals) : 0, vals.length > 0 ? Ints.max(vals) : 0,
chunkSize, chunkSize,
byteOrder, byteOrder,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
CompressedVSizeColumnarIntsSupplier supplierFromList = CompressedVSizeColumnarIntsSupplier.fromList( CompressedVSizeColumnarIntsSupplier supplierFromList = CompressedVSizeColumnarIntsSupplier.fromList(
IntArrayList.wrap(vals), IntArrayList.wrap(vals),
@ -197,16 +198,17 @@ public class CompressedVSizeColumnarIntsSerializerTest
segmentWriteOutMedium, segmentWriteOutMedium,
"test", "test",
compressionStrategy, compressionStrategy,
Long.BYTES * 10000 Long.BYTES * 10000,
segmentWriteOutMedium.getCloser()
); );
CompressedVSizeColumnarIntsSerializer serializer = new CompressedVSizeColumnarIntsSerializer( CompressedVSizeColumnarIntsSerializer serializer = new CompressedVSizeColumnarIntsSerializer(
"test", "test",
segmentWriteOutMedium,
maxValue, maxValue,
maxChunkSize, maxChunkSize,
byteOrder, byteOrder,
compressionStrategy, compressionStrategy,
genericIndexed genericIndexed,
segmentWriteOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -233,16 +235,17 @@ public class CompressedVSizeColumnarIntsSerializerTest
segmentWriteOutMedium, segmentWriteOutMedium,
"test", "test",
compressionStrategy, compressionStrategy,
Long.BYTES * 10000 Long.BYTES * 10000,
segmentWriteOutMedium.getCloser()
); );
CompressedVSizeColumnarIntsSerializer writer = new CompressedVSizeColumnarIntsSerializer( CompressedVSizeColumnarIntsSerializer writer = new CompressedVSizeColumnarIntsSerializer(
columnName, columnName,
segmentWriteOutMedium,
vals.length > 0 ? Ints.max(vals) : 0, vals.length > 0 ? Ints.max(vals) : 0,
chunkSize, chunkSize,
byteOrder, byteOrder,
compressionStrategy, compressionStrategy,
genericIndexed genericIndexed,
segmentWriteOutMedium.getCloser()
); );
writer.open(); writer.open();
for (int val : vals) { for (int val : vals) {

View File

@ -186,7 +186,8 @@ public class CompressedVariableSizeBlobColumnTest
final CompressionStrategy compressionStrategy = CompressionStrategy.LZ4; final CompressionStrategy compressionStrategy = CompressionStrategy.LZ4;
CompressedLongsSerializer serializer = new CompressedLongsSerializer( CompressedLongsSerializer serializer = new CompressedLongsSerializer(
writeOutMedium, writeOutMedium,
compressionStrategy compressionStrategy,
writeOutMedium.getCloser()
); );
serializer.open(); serializer.open();
@ -204,6 +205,7 @@ public class CompressedVariableSizeBlobColumnTest
serializer.writeTo(writer, smoosher); serializer.writeTo(writer, smoosher);
writer.close(); writer.close();
smoosher.close(); smoosher.close();
writeOutMedium.close();
SmooshedFileMapper fileMapper = SmooshedFileMapper.load(tmpFile); SmooshedFileMapper fileMapper = SmooshedFileMapper.load(tmpFile);
ByteBuffer base = fileMapper.mapFile(fileNameBase); ByteBuffer base = fileMapper.mapFile(fileNameBase);

View File

@ -206,7 +206,8 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
"offset", "offset",
offsetChunkFactor, offsetChunkFactor,
byteOrder, byteOrder,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
CompressedVSizeColumnarIntsSerializer valueWriter = new CompressedVSizeColumnarIntsSerializer( CompressedVSizeColumnarIntsSerializer valueWriter = new CompressedVSizeColumnarIntsSerializer(
TEST_COLUMN_NAME, TEST_COLUMN_NAME,
@ -215,7 +216,8 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
maxValue, maxValue,
valueChunkFactor, valueChunkFactor,
byteOrder, byteOrder,
compressionStrategy compressionStrategy,
segmentWriteOutMedium.getCloser()
); );
V3CompressedVSizeColumnarMultiIntsSerializer writer = V3CompressedVSizeColumnarMultiIntsSerializer writer =
new V3CompressedVSizeColumnarMultiIntsSerializer(TEST_COLUMN_NAME, offsetWriter, valueWriter); new V3CompressedVSizeColumnarMultiIntsSerializer(TEST_COLUMN_NAME, offsetWriter, valueWriter);
@ -271,7 +273,6 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
try (SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium()) { try (SegmentWriteOutMedium segmentWriteOutMedium = new OffHeapMemorySegmentWriteOutMedium()) {
CompressedColumnarIntsSerializer offsetWriter = new CompressedColumnarIntsSerializer( CompressedColumnarIntsSerializer offsetWriter = new CompressedColumnarIntsSerializer(
TEST_COLUMN_NAME, TEST_COLUMN_NAME,
segmentWriteOutMedium,
offsetChunkFactor, offsetChunkFactor,
byteOrder, byteOrder,
compressionStrategy, compressionStrategy,
@ -279,24 +280,27 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
segmentWriteOutMedium, segmentWriteOutMedium,
"offset", "offset",
compressionStrategy, compressionStrategy,
Long.BYTES * 250000 Long.BYTES * 250000,
) segmentWriteOutMedium.getCloser()
),
segmentWriteOutMedium.getCloser()
); );
GenericIndexedWriter genericIndexed = GenericIndexedWriter.ofCompressedByteBuffers( GenericIndexedWriter genericIndexed = GenericIndexedWriter.ofCompressedByteBuffers(
segmentWriteOutMedium, segmentWriteOutMedium,
"value", "value",
compressionStrategy, compressionStrategy,
Long.BYTES * 250000 Long.BYTES * 250000,
segmentWriteOutMedium.getCloser()
); );
CompressedVSizeColumnarIntsSerializer valueWriter = new CompressedVSizeColumnarIntsSerializer( CompressedVSizeColumnarIntsSerializer valueWriter = new CompressedVSizeColumnarIntsSerializer(
TEST_COLUMN_NAME, TEST_COLUMN_NAME,
segmentWriteOutMedium,
maxValue, maxValue,
valueChunkFactor, valueChunkFactor,
byteOrder, byteOrder,
compressionStrategy, compressionStrategy,
genericIndexed genericIndexed,
segmentWriteOutMedium.getCloser()
); );
V3CompressedVSizeColumnarMultiIntsSerializer writer = V3CompressedVSizeColumnarMultiIntsSerializer writer =
new V3CompressedVSizeColumnarMultiIntsSerializer(TEST_COLUMN_NAME, offsetWriter, valueWriter); new V3CompressedVSizeColumnarMultiIntsSerializer(TEST_COLUMN_NAME, offsetWriter, valueWriter);
@ -347,7 +351,6 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
) { ) {
CompressedColumnarIntsSerializer offsetWriter = new CompressedColumnarIntsSerializer( CompressedColumnarIntsSerializer offsetWriter = new CompressedColumnarIntsSerializer(
TEST_COLUMN_NAME, TEST_COLUMN_NAME,
segmentWriteOutMedium,
offsetChunkFactor, offsetChunkFactor,
byteOrder, byteOrder,
compressionStrategy, compressionStrategy,
@ -355,24 +358,27 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
segmentWriteOutMedium, segmentWriteOutMedium,
"offset", "offset",
compressionStrategy, compressionStrategy,
Long.BYTES * 250000 Long.BYTES * 250000,
) segmentWriteOutMedium.getCloser()
),
segmentWriteOutMedium.getCloser()
); );
GenericIndexedWriter genericIndexed = GenericIndexedWriter.ofCompressedByteBuffers( GenericIndexedWriter genericIndexed = GenericIndexedWriter.ofCompressedByteBuffers(
segmentWriteOutMedium, segmentWriteOutMedium,
"value", "value",
compressionStrategy, compressionStrategy,
Long.BYTES * 250000 Long.BYTES * 250000,
segmentWriteOutMedium.getCloser()
); );
CompressedVSizeColumnarIntsSerializer valueWriter = new CompressedVSizeColumnarIntsSerializer( CompressedVSizeColumnarIntsSerializer valueWriter = new CompressedVSizeColumnarIntsSerializer(
TEST_COLUMN_NAME, TEST_COLUMN_NAME,
segmentWriteOutMedium,
maxValue, maxValue,
valueChunkFactor, valueChunkFactor,
byteOrder, byteOrder,
compressionStrategy, compressionStrategy,
genericIndexed genericIndexed,
segmentWriteOutMedium.getCloser()
); );
V3CompressedVSizeColumnarMultiIntsSerializer writer = V3CompressedVSizeColumnarMultiIntsSerializer writer =
new V3CompressedVSizeColumnarMultiIntsSerializer(TEST_COLUMN_NAME, offsetWriter, valueWriter); new V3CompressedVSizeColumnarMultiIntsSerializer(TEST_COLUMN_NAME, offsetWriter, valueWriter);