replace division with shifts since idiv takes ~40 cycles and the compiler can't do strength reduction without knowing the operands are non-negative

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@553947 13f79535-47bb-0310-9956-ffa450edef68
Yonik Seeley 2007-07-06 16:34:40 +00:00
parent 4db9625e28
commit d7c0af3f30
4 changed files with 9 additions and 9 deletions
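
As background for the change (an illustrative aside, not part of the commit), the sketch below shows why signed division by a power of two cannot be compiled down to a bare shift: in Java, '/' rounds toward zero, '>>' rounds toward negative infinity, and '>>>' reinterprets the sign bit, so the three agree only for non-negative operands such as the midpoints, hash sizes, and stream lengths touched in this commit. The class name DivVsShift is made up for illustration.

// Illustrative sketch only; not part of the commit.
public class DivVsShift {
  public static void main(String[] args) {
    int x = -7;
    System.out.println(x / 2);    // -3: signed division rounds toward zero
    System.out.println(x >> 1);   // -4: arithmetic shift rounds toward negative infinity
    System.out.println(x >>> 1);  // 2147483644: unsigned shift reinterprets the sign bit

    // For non-negative operands, division and shift agree, so the shift is a
    // safe replacement that avoids the expensive idiv instruction.
    int lo = 3, hi = 11;
    System.out.println((lo + hi) / 2);    // 7
    System.out.println((lo + hi) >>> 1);  // 7
  }
}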

View File

@@ -323,7 +323,7 @@ final class DocumentWriter {
     if (lo >= hi)
       return;
-    int mid = (lo + hi) / 2;
+    int mid = (lo + hi) >>> 1;
     if (postings[lo].term.compareTo(postings[mid].term) > 0) {
       Posting tmp = postings[lo];

View File

@@ -734,7 +734,7 @@ final class DocumentsWriter {
     if (lo >= hi)
       return;
-    int mid = (lo + hi) / 2;
+    int mid = (lo + hi) >>> 1;
     if (comparePostings(postings[lo], postings[mid]) > 0) {
       Posting tmp = postings[lo];
@@ -792,7 +792,7 @@ final class DocumentsWriter {
     if (lo >= hi)
       return;
-    int mid = (lo + hi) / 2;
+    int mid = (lo + hi) >>> 1;
     if (comparePostings(postings[lo].p, postings[mid].p) > 0) {
       PostingVector tmp = postings[lo];
@@ -880,9 +880,9 @@ final class DocumentsWriter {
       int hashSize = fp.postingsHashSize;
       // Reduce hash so it's between 25-50% full
-      while (fp.numPostings < hashSize/2 && hashSize >= 2)
-        hashSize /= 2;
-      hashSize *= 2;
+      while (fp.numPostings < (hashSize>>1) && hashSize >= 2)
+        hashSize >>= 1;
+      hashSize <<= 1;
       if (hashSize != fp.postingsHash.length)
         fp.rehashPostings(hashSize);
@@ -1598,7 +1598,7 @@ final class DocumentsWriter {
     postingsHash = newHash;
     postingsHashSize = newSize;
-    postingsHashHalfSize = newSize/2;
+    postingsHashHalfSize = newSize >> 1;
   }
   final ByteSliceReader vectorSliceReader = new ByteSliceReader();
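
For reference, the shrink loop in the @@ -880,9 hunk above can be read as the standalone sketch below (the helper name shrinkHashSize is hypothetical): keep halving while the table is less than half full, then double back once, which leaves it 25-50% full whenever at least one halving occurred.

// Hypothetical standalone rendering of the shrink logic above.
static int shrinkHashSize(int hashSize, int numPostings) {
  // Halve while the table is less than half full at its current size...
  while (numPostings < (hashSize >> 1) && hashSize >= 2)
    hashSize >>= 1;
  // ...then undo the last halving, landing between 25% and 50% occupancy.
  hashSize <<= 1;
  return hashSize;
}
// e.g. shrinkHashSize(64, 5) == 16: 5 entries in 16 slots is ~31% full.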

View File

@@ -82,7 +82,7 @@ final class FieldsReader {
       assert ((int) (indexStream.length()/8)) >= size + this.docStoreOffset;
     } else {
       this.docStoreOffset = 0;
-      this.size = (int) (indexStream.length() / 8);
+      this.size = (int) (indexStream.length() >> 3);
     }
   }

View File

@@ -62,7 +62,7 @@ class TermVectorsReader implements Cloneable {
     tvfFormat = checkValidFormat(tvf);
     if (-1 == docStoreOffset) {
       this.docStoreOffset = 0;
-      this.size = (int) (tvx.length() / 8);
+      this.size = (int) (tvx.length() >> 3);
     } else {
       this.docStoreOffset = docStoreOffset;
       this.size = size;