LUCENE-3892: Use int[] arrays to buffer data instead of long[] arrays.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/pforcodec_3892@1370819 13f79535-47bb-0310-9956-ffa450edef68
Author: Adrien Grand
Date:   2012-08-08 16:12:29 +00:00
Parent: 5e948be185
Commit: c06b36c762

4 changed files with 34 additions and 34 deletions
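
Note on the change: every value these classes buffer per block -- doc deltas, freqs, position deltas, payload lengths, offset start deltas and offset lengths -- is at most 32 bits wide (readBlock below asserts numBits <= 32), so narrowing the buffers from long[] to int[] loses no information while halving their footprint. A back-of-the-envelope illustration (my arithmetic, not part of the commit; assumes BLOCK_SIZE = 128 as on this branch, with MIN_DATA_SIZE on the order of one block):

  // Three always-allocated per-enum buffers (docDelta, freq, posDelta):
  //   as long[]: 3 * 128 * 8 bytes = 3072 bytes per enum
  //   as int[]:  3 * 128 * 4 bytes = 1536 bytes per enum
  // Halving the element width also doubles the number of buffered
  // values that fit in each cache line.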

BlockPackedPostingsReader.java

@@ -298,8 +298,8 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
   final class BlockDocsEnum extends DocsEnum {
     private final byte[] encoded;
-    private final long[] docDeltaBuffer = new long[MIN_DATA_SIZE];
-    private final long[] freqBuffer = new long[MIN_DATA_SIZE];
+    private final int[] docDeltaBuffer = new int[MIN_DATA_SIZE];
+    private final int[] freqBuffer = new int[MIN_DATA_SIZE];
     private int docBufferUpto;
@@ -544,9 +544,9 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
     private final byte[] encoded;
-    private final long[] docDeltaBuffer = new long[MIN_DATA_SIZE];
-    private final long[] freqBuffer = new long[MIN_DATA_SIZE];
-    private final long[] posDeltaBuffer = new long[MIN_DATA_SIZE];
+    private final int[] docDeltaBuffer = new int[MIN_DATA_SIZE];
+    private final int[] freqBuffer = new int[MIN_DATA_SIZE];
+    private final int[] posDeltaBuffer = new int[MIN_DATA_SIZE];
     private int docBufferUpto;
     private int posBufferUpto;
@@ -949,13 +949,13 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
     private final byte[] encoded;
-    private final long[] docDeltaBuffer = new long[MIN_DATA_SIZE];
-    private final long[] freqBuffer = new long[MIN_DATA_SIZE];
-    private final long[] posDeltaBuffer = new long[MIN_DATA_SIZE];
-    private final long[] payloadLengthBuffer;
-    private final long[] offsetStartDeltaBuffer;
-    private final long[] offsetLengthBuffer;
+    private final int[] docDeltaBuffer = new int[MIN_DATA_SIZE];
+    private final int[] freqBuffer = new int[MIN_DATA_SIZE];
+    private final int[] posDeltaBuffer = new int[MIN_DATA_SIZE];
+    private final int[] payloadLengthBuffer;
+    private final int[] offsetStartDeltaBuffer;
+    private final int[] offsetLengthBuffer;
     private byte[] payloadBytes;
     private int payloadByteUpto;
@@ -1030,8 +1030,8 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
       encoded = new byte[MIN_ENCODED_SIZE];
       indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
       if (indexHasOffsets) {
-        offsetStartDeltaBuffer = new long[MIN_DATA_SIZE];
-        offsetLengthBuffer = new long[MIN_DATA_SIZE];
+        offsetStartDeltaBuffer = new int[MIN_DATA_SIZE];
+        offsetLengthBuffer = new int[MIN_DATA_SIZE];
       } else {
         offsetStartDeltaBuffer = null;
         offsetLengthBuffer = null;
@@ -1041,7 +1041,7 @@ public final class BlockPackedPostingsReader extends PostingsReaderBase {
       indexHasPayloads = fieldInfo.hasPayloads();
       if (indexHasPayloads) {
-        payloadLengthBuffer = new long[MIN_DATA_SIZE];
+        payloadLengthBuffer = new int[MIN_DATA_SIZE];
         payloadBytes = new byte[128];
         payload = new BytesRef();
       } else {
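
For context, this is roughly how such a reader refills its doc buffer from the two block encodings; a hedged sketch, assuming hypothetical fields docIn, docFreq, docUpto and indexHasFreq (the real refillDocs method is outside the shown hunks):

  private void refillDocs() throws IOException {
    final int left = docFreq - docUpto;
    if (left >= BLOCK_SIZE) {
      // full block: bit-packed through ForUtil
      forUtil.readBlock(docIn, encoded, docDeltaBuffer);
      if (indexHasFreq) {
        forUtil.readBlock(docIn, encoded, freqBuffer);
      }
    } else {
      // tail block: variable-length encoded
      ForUtil.readVIntBlock(docIn, docDeltaBuffer, freqBuffer, left, indexHasFreq);
    }
    docBufferUpto = 0;
  }

With int[] buffers, values surfaced through DocsEnum (docID(), freq()) need no narrowing casts.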

BlockPackedPostingsWriter.java

@@ -83,14 +83,14 @@ public final class BlockPackedPostingsWriter extends PostingsWriterBase {
   private long posTermStartFP;
   private long payTermStartFP;
-  final long[] docDeltaBuffer;
-  final long[] freqBuffer;
+  final int[] docDeltaBuffer;
+  final int[] freqBuffer;
   private int docBufferUpto;
-  final long[] posDeltaBuffer;
-  final long[] payloadLengthBuffer;
-  final long[] offsetStartDeltaBuffer;
-  final long[] offsetLengthBuffer;
+  final int[] posDeltaBuffer;
+  final int[] payloadLengthBuffer;
+  final int[] offsetStartDeltaBuffer;
+  final int[] offsetLengthBuffer;
   private int posBufferUpto;
   private byte[] payloadBytes;
@@ -125,22 +125,22 @@ public final class BlockPackedPostingsWriter extends PostingsWriterBase {
     CodecUtil.writeHeader(docOut, DOC_CODEC, VERSION_CURRENT);
     forUtil = new ForUtil(acceptableOverheadRatio, docOut);
     if (state.fieldInfos.hasProx()) {
-      posDeltaBuffer = new long[MIN_DATA_SIZE];
+      posDeltaBuffer = new int[MIN_DATA_SIZE];
       posOut = state.directory.createOutput(IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, BlockPackedPostingsFormat.POS_EXTENSION),
           state.context);
       CodecUtil.writeHeader(posOut, POS_CODEC, VERSION_CURRENT);
       if (state.fieldInfos.hasPayloads()) {
         payloadBytes = new byte[128];
-        payloadLengthBuffer = new long[MIN_DATA_SIZE];
+        payloadLengthBuffer = new int[MIN_DATA_SIZE];
       } else {
         payloadBytes = null;
         payloadLengthBuffer = null;
       }
       if (state.fieldInfos.hasOffsets()) {
-        offsetStartDeltaBuffer = new long[MIN_DATA_SIZE];
-        offsetLengthBuffer = new long[MIN_DATA_SIZE];
+        offsetStartDeltaBuffer = new int[MIN_DATA_SIZE];
+        offsetLengthBuffer = new int[MIN_DATA_SIZE];
       } else {
         offsetStartDeltaBuffer = null;
         offsetLengthBuffer = null;
@@ -167,8 +167,8 @@ public final class BlockPackedPostingsWriter extends PostingsWriterBase {
       }
     }
-    docDeltaBuffer = new long[MIN_DATA_SIZE];
-    freqBuffer = new long[MIN_DATA_SIZE];
+    docDeltaBuffer = new int[MIN_DATA_SIZE];
+    freqBuffer = new int[MIN_DATA_SIZE];
     skipWriter = new BlockPackedSkipWriter(maxSkipLevels,
                                            BlockPackedPostingsFormat.BLOCK_SIZE,
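
The write side mirrors the reader's buffering; a hedged sketch of the pattern, assuming hypothetical fields lastDocID, indexHasFreq, encoded and docOut (the actual startDoc logic is outside the shown hunks):

  public void startDoc(int docID, int termDocFreq) throws IOException {
    docDeltaBuffer[docBufferUpto] = docID - lastDocID; // delta always fits in an int
    if (indexHasFreq) {
      freqBuffer[docBufferUpto] = termDocFreq;
    }
    docBufferUpto++;
    if (docBufferUpto == BLOCK_SIZE) {
      // block full: bit-pack and flush both buffers
      forUtil.writeBlock(docDeltaBuffer, encoded, docOut);
      if (indexHasFreq) {
        forUtil.writeBlock(freqBuffer, encoded, docOut);
      }
      docBufferUpto = 0;
    }
    lastDocID = docID;
  }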

ForUtil.java

@@ -144,7 +144,7 @@ final class ForUtil {
    * @param out the destination output
    * @throws IOException
    */
-  void writeBlock(long[] data, byte[] encoded, IndexOutput out) throws IOException {
+  void writeBlock(int[] data, byte[] encoded, IndexOutput out) throws IOException {
     if (isAllEqual(data)) {
       out.writeVInt(ALL_VALUES_EQUAL);
       out.writeInt((int) data[0]);
@@ -173,7 +173,7 @@ final class ForUtil {
    * @param decoded where to write decoded data
    * @throws IOException
    */
-  void readBlock(IndexInput in, byte[] encoded, long[] decoded) throws IOException {
+  void readBlock(IndexInput in, byte[] encoded, int[] decoded) throws IOException {
     final int numBits = in.readVInt();
     assert numBits <= 32 : numBits;
@@ -213,8 +213,8 @@ final class ForUtil {
   /**
    * Read values that have been written using variable-length encoding instead of bit-packing.
    */
-  static void readVIntBlock(IndexInput docIn, long[] docBuffer,
-      long[] freqBuffer, int num, boolean indexHasFreq) throws IOException {
+  static void readVIntBlock(IndexInput docIn, int[] docBuffer,
+      int[] freqBuffer, int num, boolean indexHasFreq) throws IOException {
     if (indexHasFreq) {
       for(int i=0;i<num;i++) {
         final int code = docIn.readVInt();
@@ -233,7 +233,7 @@ final class ForUtil {
   }

   // nocommit: we must have a util function for this, hmm?
-  private static boolean isAllEqual(final long[] data) {
+  private static boolean isAllEqual(final int[] data) {
     final long v = data[0];
     for (int i = 1; i < BLOCK_SIZE; ++i) {
       if (data[i] != v) {
@@ -247,7 +247,7 @@ final class ForUtil {
    * Compute the number of bits required to serialize any of the longs in
    * <code>data</code>.
    */
-  private static int bitsRequired(final long[] data) {
+  private static int bitsRequired(final int[] data) {
     long or = 0;
     for (int i = 0; i < BLOCK_SIZE; ++i) {
       or |= data[i];
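
The readVIntBlock hunk above shows only the loop's first lines; a hedged reconstruction of the full method, consistent with the shown signature and context (the low bit of each code flags freq == 1, so the common case costs one vInt per doc):

  static void readVIntBlock(IndexInput docIn, int[] docBuffer,
      int[] freqBuffer, int num, boolean indexHasFreq) throws IOException {
    if (indexHasFreq) {
      for (int i = 0; i < num; i++) {
        final int code = docIn.readVInt();
        docBuffer[i] = code >>> 1;          // doc delta in the high bits
        if ((code & 1) != 0) {
          freqBuffer[i] = 1;                // low bit set: freq is exactly 1
        } else {
          freqBuffer[i] = docIn.readVInt(); // otherwise freq follows as its own vInt
        }
      }
    } else {
      for (int i = 0; i < num; i++) {
        docBuffer[i] = docIn.readVInt();
      }
    }
  }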

TestForUtil.java

@@ -39,7 +39,7 @@ public class TestForUtil extends LuceneTestCase {
   public void testEncodeDecode() throws IOException {
     final int iterations = RandomInts.randomIntBetween(random(), 1, 1000);
     final float acceptableOverheadRatio = random().nextFloat();
-    final long[] values = new long[iterations * BLOCK_SIZE + ForUtil.MIN_DATA_SIZE];
+    final int[] values = new int[iterations * BLOCK_SIZE + ForUtil.MIN_DATA_SIZE];
     for (int i = 0; i < iterations; ++i) {
       final int bpv = random().nextInt(32);
       if (bpv == 0) {
@@ -81,7 +81,7 @@ public class TestForUtil extends LuceneTestCase {
         forUtil.skipBlock(in);
         continue;
       }
-      final long[] restored = new long[MIN_DATA_SIZE];
+      final int[] restored = new int[MIN_DATA_SIZE];
       forUtil.readBlock(in, new byte[MIN_ENCODED_SIZE], restored);
       assertArrayEquals(Arrays.copyOfRange(values, iterations * BLOCK_SIZE, (iterations + 1) * BLOCK_SIZE),
           Arrays.copyOf(restored, BLOCK_SIZE));
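
The test's loop body between the two shown hunks presumably fills each block with random bpv-bit values; a hedged sketch of that generation step (my code, not the test's):

  final int bpv = random().nextInt(32);           // 0..31 bits per value
  for (int j = 0; j < BLOCK_SIZE; ++j) {
    values[i * BLOCK_SIZE + j] = bpv == 0
        ? 0
        : random().nextInt() & ((1 << bpv) - 1);  // mask to the low bpv bits
  }

Every generated value fits in an int, which is what makes the long[] to int[] switch safe end to end.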