LUCENE-3892: Use int[] arrays to buffer data instead of long[] arrays.
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/pforcodec_3892@1370819 13f79535-47bb-0310-9956-ffa450edef68
parent 5e948be185
commit c06b36c762
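The change is mechanical but the rationale is visible in the diff below: ForUtil never packs more than 32 bits per value (note the `assert numBits <= 32` in `readBlock`), so every buffered doc delta, freq, position delta, payload length, and offset fits in an `int`, and `int[]` buffers take half the memory of the old `long[]` buffers. A minimal sketch of the idea, with hypothetical names (this is not the patched Lucene code):

// Sketch only: illustrates why int is wide enough for these buffers.
// BLOCK_SIZE is assumed here for illustration, not taken from the patch.
final class IntBufferSketch {
  static final int BLOCK_SIZE = 128;

  // Counterpart of the patched bitsRequired(int[]): OR all values together,
  // then count how many low-order bits are actually used.
  static int bitsRequired(int[] data) {
    int or = 0;
    for (int i = 0; i < BLOCK_SIZE; ++i) {
      or |= data[i];
    }
    return 32 - Integer.numberOfLeadingZeros(or | 1); // >= 1 bit, <= 32 bits
  }
}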
@@ -298,8 +298,8 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
   final class BlockDocsEnum extends DocsEnum {
     private final byte[] encoded;
 
-    private final long[] docDeltaBuffer = new long[MIN_DATA_SIZE];
-    private final long[] freqBuffer = new long[MIN_DATA_SIZE];
+    private final int[] docDeltaBuffer = new int[MIN_DATA_SIZE];
+    private final int[] freqBuffer = new int[MIN_DATA_SIZE];
 
     private int docBufferUpto;
 
@@ -544,9 +544,9 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
 
     private final byte[] encoded;
 
-    private final long[] docDeltaBuffer = new long[MIN_DATA_SIZE];
-    private final long[] freqBuffer = new long[MIN_DATA_SIZE];
-    private final long[] posDeltaBuffer = new long[MIN_DATA_SIZE];
+    private final int[] docDeltaBuffer = new int[MIN_DATA_SIZE];
+    private final int[] freqBuffer = new int[MIN_DATA_SIZE];
+    private final int[] posDeltaBuffer = new int[MIN_DATA_SIZE];
 
     private int docBufferUpto;
     private int posBufferUpto;
@@ -949,13 +949,13 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
 
     private final byte[] encoded;
 
-    private final long[] docDeltaBuffer = new long[MIN_DATA_SIZE];
-    private final long[] freqBuffer = new long[MIN_DATA_SIZE];
-    private final long[] posDeltaBuffer = new long[MIN_DATA_SIZE];
+    private final int[] docDeltaBuffer = new int[MIN_DATA_SIZE];
+    private final int[] freqBuffer = new int[MIN_DATA_SIZE];
+    private final int[] posDeltaBuffer = new int[MIN_DATA_SIZE];
 
-    private final long[] payloadLengthBuffer;
-    private final long[] offsetStartDeltaBuffer;
-    private final long[] offsetLengthBuffer;
+    private final int[] payloadLengthBuffer;
+    private final int[] offsetStartDeltaBuffer;
+    private final int[] offsetLengthBuffer;
 
     private byte[] payloadBytes;
     private int payloadByteUpto;
@@ -1030,8 +1030,8 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
       encoded = new byte[MIN_ENCODED_SIZE];
       indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
       if (indexHasOffsets) {
-        offsetStartDeltaBuffer = new long[MIN_DATA_SIZE];
-        offsetLengthBuffer = new long[MIN_DATA_SIZE];
+        offsetStartDeltaBuffer = new int[MIN_DATA_SIZE];
+        offsetLengthBuffer = new int[MIN_DATA_SIZE];
       } else {
         offsetStartDeltaBuffer = null;
         offsetLengthBuffer = null;
@@ -1041,7 +1041,7 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
 
       indexHasPayloads = fieldInfo.hasPayloads();
       if (indexHasPayloads) {
-        payloadLengthBuffer = new long[MIN_DATA_SIZE];
+        payloadLengthBuffer = new int[MIN_DATA_SIZE];
         payloadBytes = new byte[128];
         payload = new BytesRef();
       } else {
@@ -83,14 +83,14 @@ public final class BlockPackedPostingsWriter extends PostingsWriterBase {
   private long posTermStartFP;
   private long payTermStartFP;
 
-  final long[] docDeltaBuffer;
-  final long[] freqBuffer;
+  final int[] docDeltaBuffer;
+  final int[] freqBuffer;
   private int docBufferUpto;
 
-  final long[] posDeltaBuffer;
-  final long[] payloadLengthBuffer;
-  final long[] offsetStartDeltaBuffer;
-  final long[] offsetLengthBuffer;
+  final int[] posDeltaBuffer;
+  final int[] payloadLengthBuffer;
+  final int[] offsetStartDeltaBuffer;
+  final int[] offsetLengthBuffer;
   private int posBufferUpto;
 
   private byte[] payloadBytes;
@@ -125,22 +125,22 @@ public final class BlockPackedPostingsWriter extends PostingsWriterBase {
     CodecUtil.writeHeader(docOut, DOC_CODEC, VERSION_CURRENT);
     forUtil = new ForUtil(acceptableOverheadRatio, docOut);
     if (state.fieldInfos.hasProx()) {
-      posDeltaBuffer = new long[MIN_DATA_SIZE];
+      posDeltaBuffer = new int[MIN_DATA_SIZE];
       posOut = state.directory.createOutput(IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, BlockPackedPostingsFormat.POS_EXTENSION),
                                             state.context);
       CodecUtil.writeHeader(posOut, POS_CODEC, VERSION_CURRENT);
 
       if (state.fieldInfos.hasPayloads()) {
         payloadBytes = new byte[128];
-        payloadLengthBuffer = new long[MIN_DATA_SIZE];
+        payloadLengthBuffer = new int[MIN_DATA_SIZE];
       } else {
         payloadBytes = null;
         payloadLengthBuffer = null;
       }
 
       if (state.fieldInfos.hasOffsets()) {
-        offsetStartDeltaBuffer = new long[MIN_DATA_SIZE];
-        offsetLengthBuffer = new long[MIN_DATA_SIZE];
+        offsetStartDeltaBuffer = new int[MIN_DATA_SIZE];
+        offsetLengthBuffer = new int[MIN_DATA_SIZE];
       } else {
         offsetStartDeltaBuffer = null;
         offsetLengthBuffer = null;
@@ -167,8 +167,8 @@ public final class BlockPackedPostingsWriter extends PostingsWriterBase {
       }
     }
 
-    docDeltaBuffer = new long[MIN_DATA_SIZE];
-    freqBuffer = new long[MIN_DATA_SIZE];
+    docDeltaBuffer = new int[MIN_DATA_SIZE];
+    freqBuffer = new int[MIN_DATA_SIZE];
 
     skipWriter = new BlockPackedSkipWriter(maxSkipLevels,
                                            BlockPackedPostingsFormat.BLOCK_SIZE,
@@ -144,7 +144,7 @@ final class ForUtil {
    * @param out the destination output
    * @throws IOException
    */
-  void writeBlock(long[] data, byte[] encoded, IndexOutput out) throws IOException {
+  void writeBlock(int[] data, byte[] encoded, IndexOutput out) throws IOException {
     if (isAllEqual(data)) {
       out.writeVInt(ALL_VALUES_EQUAL);
       out.writeInt((int) data[0]);
@@ -173,7 +173,7 @@ final class ForUtil {
    * @param decoded where to write decoded data
    * @throws IOException
    */
-  void readBlock(IndexInput in, byte[] encoded, long[] decoded) throws IOException {
+  void readBlock(IndexInput in, byte[] encoded, int[] decoded) throws IOException {
     final int numBits = in.readVInt();
     assert numBits <= 32 : numBits;
 
@@ -213,8 +213,8 @@ final class ForUtil {
   /**
    * Read values that have been written using variable-length encoding instead of bit-packing.
    */
-  static void readVIntBlock(IndexInput docIn, long[] docBuffer,
-      long[] freqBuffer, int num, boolean indexHasFreq) throws IOException {
+  static void readVIntBlock(IndexInput docIn, int[] docBuffer,
+      int[] freqBuffer, int num, boolean indexHasFreq) throws IOException {
     if (indexHasFreq) {
       for(int i=0;i<num;i++) {
         final int code = docIn.readVInt();
@@ -233,7 +233,7 @@ final class ForUtil {
   }
 
   // nocommit: we must have a util function for this, hmm?
-  private static boolean isAllEqual(final long[] data) {
+  private static boolean isAllEqual(final int[] data) {
     final long v = data[0];
     for (int i = 1; i < BLOCK_SIZE; ++i) {
       if (data[i] != v) {
@@ -247,7 +247,7 @@ final class ForUtil {
    * Compute the number of bits required to serialize any of the longs in
    * <code>data</code>.
    */
-  private static int bitsRequired(final long[] data) {
+  private static int bitsRequired(final int[] data) {
     long or = 0;
     for (int i = 0; i < BLOCK_SIZE; ++i) {
       or |= data[i];
@@ -39,7 +39,7 @@ public class TestForUtil extends LuceneTestCase {
   public void testEncodeDecode() throws IOException {
     final int iterations = RandomInts.randomIntBetween(random(), 1, 1000);
     final float acceptableOverheadRatio = random().nextFloat();
-    final long[] values = new long[iterations * BLOCK_SIZE + ForUtil.MIN_DATA_SIZE];
+    final int[] values = new int[iterations * BLOCK_SIZE + ForUtil.MIN_DATA_SIZE];
     for (int i = 0; i < iterations; ++i) {
       final int bpv = random().nextInt(32);
       if (bpv == 0) {
@@ -81,7 +81,7 @@ public class TestForUtil extends LuceneTestCase {
         forUtil.skipBlock(in);
         continue;
       }
-      final long[] restored = new long[MIN_DATA_SIZE];
+      final int[] restored = new int[MIN_DATA_SIZE];
       forUtil.readBlock(in, new byte[MIN_ENCODED_SIZE], restored);
       assertArrayEquals(Arrays.copyOfRange(values, iterations * BLOCK_SIZE, (iterations + 1) * BLOCK_SIZE),
           Arrays.copyOf(restored, BLOCK_SIZE));