mirror of https://github.com/apache/lucene.git
remove some dead/unnecessary code
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@599131 13f79535-47bb-0310-9956-ffa450edef68
parent db52f4ed7f
commit 8e79f64fbc

DocumentsWriter.java

@@ -726,8 +726,6 @@ final class DocumentsWriter {
 fp.docFields[fp.fieldCount++] = field;
 }

-final int numFields = fieldInfos.size();
-
 // Maybe init the local & global fieldsWriter
 if (localFieldsWriter == null) {
 if (fieldsWriter == null) {
@@ -764,11 +762,11 @@ final class DocumentsWriter {
 }

 /** Do in-place sort of Posting array */
-final void doPostingSort(Posting[] postings, int numPosting) {
+void doPostingSort(Posting[] postings, int numPosting) {
 quickSort(postings, 0, numPosting-1);
 }

-final void quickSort(Posting[] postings, int lo, int hi) {
+void quickSort(Posting[] postings, int lo, int hi) {
 if (lo >= hi)
 return;

@@ -822,11 +820,11 @@ final class DocumentsWriter {
 }

 /** Do in-place sort of PostingVector array */
-final void doVectorSort(PostingVector[] postings, int numPosting) {
+void doVectorSort(PostingVector[] postings, int numPosting) {
 quickSort(postings, 0, numPosting-1);
 }

-final void quickSort(PostingVector[] postings, int lo, int hi) {
+void quickSort(PostingVector[] postings, int lo, int hi) {
 if (lo >= hi)
 return;

@@ -1040,7 +1038,6 @@ final class DocumentsWriter {

 /** Write vInt into freq stream of current Posting */
 public void writeFreqVInt(int i) {
-int upto = 0;
 while ((i & ~0x7F) != 0) {
 writeFreqByte((byte)((i & 0x7f) | 0x80));
 i >>>= 7;
@@ -1050,7 +1047,6 @@ final class DocumentsWriter {

 /** Write vInt into prox stream of current Posting */
 public void writeProxVInt(int i) {
-int upto = 0;
 while ((i & ~0x7F) != 0) {
 writeProxByte((byte)((i & 0x7f) | 0x80));
 i >>>= 7;
@@ -1106,7 +1102,6 @@ final class DocumentsWriter {
 /** Write vInt into offsets stream of current
 * PostingVector */
 public void writeOffsetVInt(int i) {
-int upto = 0;
 while ((i & ~0x7F) != 0) {
 writeOffsetByte((byte)((i & 0x7f) | 0x80));
 i >>>= 7;
@@ -1132,7 +1127,6 @@ final class DocumentsWriter {
 /** Write vInt into pos stream of current
 * PostingVector */
 public void writePosVInt(int i) {
-int upto = 0;
 while ((i & ~0x7F) != 0) {
 writePosByte((byte)((i & 0x7f) | 0x80));
 i >>>= 7;
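Note: in all four write*VInt methods above, the deleted "int upto = 0;" was never read, so removing it changes nothing. The loop they share is Lucene's usual variable-length int encoding: 7 payload bits per byte, with the high bit set on every byte except the last. A minimal standalone sketch follows; the byte sink is a stand-in for writeFreqByte/writeProxByte/writeOffsetByte/writePosByte, and the trailing write of the final byte is assumed, since the hunks are truncated before it.

// Sketch only, not DocumentsWriter code: encode i as a vInt into out.
static void writeVInt(int i, java.io.ByteArrayOutputStream out) {
  while ((i & ~0x7F) != 0) {
    out.write((i & 0x7f) | 0x80); // low 7 bits, continuation bit set
    i >>>= 7;
  }
  out.write(i); // final byte, continuation bit clear
}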
@@ -1240,7 +1234,6 @@ final class DocumentsWriter {
 offset = 0;
 boost = docBoost;

-final int startNumPostings = numPostings;
 final int maxFieldLength = writer.getMaxFieldLength();

 final int limit = fieldCount;
@@ -1401,7 +1394,6 @@ final class DocumentsWriter {
 final int tokenTextLen = token.termLength();

 int code = 0;
-int code2 = 0;

 // Compute hashcode
 int downto = tokenTextLen;
@@ -1965,7 +1957,6 @@ final class DocumentsWriter {
 assert result;
 }

-Posting lastPosting = null;
 final int skipInterval = termsOut.skipInterval;
 currentFieldStorePayloads = fields[0].fieldInfo.storePayloads;


IndexFileDeleter.java

@@ -17,9 +17,6 @@ package org.apache.lucene.index;
 * limitations under the License.
 */

-import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.index.SegmentInfos;
-import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.store.Directory;

 import java.io.IOException;
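These three imports are dead because IndexFileDeleter itself lives in org.apache.lucene.index; classes in the same package resolve without an import. A tiny illustrative file, not part of the patch, showing why:

// Illustration only: same-package types need no import statement.
package org.apache.lucene.index;

final class SamePackageExample {
  SegmentInfos infos; // resolves without "import org.apache.lucene.index.SegmentInfos;"
}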
@@ -562,11 +559,11 @@ final class IndexFileDeleter {

 int count;

-final public int IncRef() {
+public int IncRef() {
 return ++count;
 }

-final public int DecRef() {
+public int DecRef() {
 assert count > 0;
 return --count;
 }
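The only change here is dropping the redundant "final" modifier; IncRef/DecRef behave exactly as before. As a rough sketch of how such a counter is typically used (the holder class name and the deleteFile call are assumptions for illustration, not taken from this diff):

// Hypothetical stand-in for the ref-count holder around IncRef/DecRef.
final class RefCountSketch {
  int count;

  public int IncRef() { return ++count; }

  public int DecRef() {
    assert count > 0;
    return --count;
  }
}

// Usage sketch: a file becomes deletable once its last reference is released.
// if (rc.DecRef() == 0) deleteFile(fileName); // deleteFile is assumed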

IndexWriter.java

@@ -2073,8 +2073,6 @@ public class IndexWriter {
 message("flush at addIndexes");
 flush();

-int start = segmentInfos.size();
-
 boolean success = false;

 startTransaction();
@@ -2580,7 +2578,6 @@ public class IndexWriter {
 try {
 SegmentInfos sourceSegmentsClone = merge.segmentsClone;
 SegmentInfos sourceSegments = merge.segments;
-final int numSegments = segmentInfos.size();

 start = ensureContiguousMerge(merge);
 if (infoStream != null)
@@ -2741,7 +2738,6 @@ public class IndexWriter {

 assert merge.registerDone;

-int mergedDocCount;
 boolean success = false;

 try {
@@ -2752,7 +2748,7 @@ public class IndexWriter {
 if (infoStream != null)
 message("now merge\n merge=" + merge.segString(directory) + "\n index=" + segString());

-mergedDocCount = mergeMiddle(merge);
+mergeMiddle(merge);

 success = true;
 } finally {
@@ -2833,9 +2829,8 @@ public class IndexWriter {

 final SegmentInfos sourceSegments = merge.segments;
 final int end = sourceSegments.size();
-final int numSegments = segmentInfos.size();

-final int start = ensureContiguousMerge(merge);
+ensureContiguousMerge(merge);

 // Check whether this merge will allow us to skip
 // merging the doc stores (stored field & vectors).
@@ -2959,7 +2954,6 @@ public class IndexWriter {
 assert merge.registerDone;

 final SegmentInfos sourceSegments = merge.segments;
-final SegmentInfos sourceSegmentsClone = merge.segmentsClone;
 final int end = sourceSegments.size();
 for(int i=0;i<end;i++)
 mergingSegments.remove(sourceSegments.info(i));

LogMergePolicy.java

@@ -168,7 +168,6 @@ public abstract class LogMergePolicy extends MergePolicy {
 * (mergeFactor at a time) so the {@link MergeScheduler}
 * in use may make use of concurrency. */
 public MergeSpecification findMergesForOptimize(SegmentInfos infos, IndexWriter writer, int maxNumSegments, Set segmentsToOptimize) throws IOException {
-final Directory dir = writer.getDirectory();
 MergeSpecification spec;

 if (!isOptimized(infos, writer, maxNumSegments, segmentsToOptimize)) {