mirror of https://github.com/apache/lucene.git

javadocs

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1388662 13f79535-47bb-0310-9956-ffa450edef68

parent c842042045
commit 6f0f9a8a10
@@ -265,6 +265,7 @@
      <check-missing-javadocs dir="build/docs/core/org/apache/lucene/document" level="method"/>
      <check-missing-javadocs dir="build/docs/core/org/apache/lucene/search/similarities" level="method"/>
      <check-missing-javadocs dir="build/docs/core/org/apache/lucene/index" level="method"/>
      <check-missing-javadocs dir="build/docs/core/org/apache/lucene/codecs" level="method"/>
    </sequential>
  </target>
@@ -36,6 +36,11 @@ public class BlockTermState extends OrdTermState {
  /** fp into the terms dict primary file (_X.tim) that holds this term */
  public long blockFilePointer;

  /** Sole constructor. (For invocation by subclass
   * constructors, typically implicit.) */
  protected BlockTermState() {
  }

  @Override
  public void copyFrom(TermState _other) {
    assert _other instanceof BlockTermState : "can not copy from " + _other.getClass().getName();
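The copyFrom contract shown above is easiest to see outside Lucene: a subclass state may only be copied from another instance of the same subclass, so the override asserts the runtime type before downcasting. A minimal sketch of that pattern with hypothetical classes (not Lucene's):

abstract class State {
  long ord;                        // field shared by all states
  void copyFrom(State other) {     // base copy handles only the shared field
    this.ord = other.ord;
  }
}

class BlockState extends State {
  long blockFilePointer;           // subclass-specific field

  @Override
  void copyFrom(State other) {
    // only another BlockState carries the extra field we need to copy
    assert other instanceof BlockState : "can not copy from " + other.getClass().getName();
    super.copyFrom(other);
    this.blockFilePointer = ((BlockState) other).blockFilePointer;
  }
}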
@@ -99,12 +99,15 @@ public class BlockTreeTermsReader extends FieldsProducer {

  private final TreeMap<String,FieldReader> fields = new TreeMap<String,FieldReader>();

  // keeps the dirStart offset
  /** File offset where the directory starts in the terms file. */
  protected long dirOffset;

  /** File offset where the directory starts in the index file. */
  protected long indexDirOffset;

  private String segment;

  /** Sole constructor. */
  public BlockTreeTermsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo info,
                              PostingsReaderBase postingsReader, IOContext ioContext,
                              String segmentSuffix, int indexDivisor)
@@ -179,6 +182,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
      }
    }

  /** Reads terms file header. */
  protected void readHeader(IndexInput input) throws IOException {
    CodecUtil.checkHeader(input, BlockTreeTermsWriter.TERMS_CODEC_NAME,
                          BlockTreeTermsWriter.TERMS_VERSION_START,
@@ -186,6 +190,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
    dirOffset = input.readLong();
  }

  /** Reads index file header. */
  protected void readIndexHeader(IndexInput input) throws IOException {
    CodecUtil.checkHeader(input, BlockTreeTermsWriter.TERMS_INDEX_CODEC_NAME,
                          BlockTreeTermsWriter.TERMS_INDEX_VERSION_START,
@@ -193,6 +198,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
    indexDirOffset = input.readLong();
  }

  /** Seek {@code input} to the directory offset. */
  protected void seekDir(IndexInput input, long dirOffset)
      throws IOException {
    input.seek(dirOffset);
@ -251,36 +257,70 @@ public class BlockTreeTermsReader extends FieldsProducer {
|
|||
* returned by {@link FieldReader#computeStats()}.
|
||||
*/
|
||||
public static class Stats {
|
||||
/** How many nodes in the index FST. */
|
||||
public int indexNodeCount;
|
||||
|
||||
/** How many arcs in the index FST. */
|
||||
public int indexArcCount;
|
||||
|
||||
/** Byte size of the index. */
|
||||
public int indexNumBytes;
|
||||
|
||||
/** Total number of terms in the field. */
|
||||
public long totalTermCount;
|
||||
|
||||
/** Total number of bytes (sum of term lengths) across all terms in the field. */
|
||||
public long totalTermBytes;
|
||||
|
||||
|
||||
/** The number of normal (non-floor) blocks in the terms file. */
|
||||
public int nonFloorBlockCount;
|
||||
|
||||
/** The number of floor blocks (meta-blocks larger than the
|
||||
* allowed {@code maxItemsPerBlock}) in the terms file. */
|
||||
public int floorBlockCount;
|
||||
|
||||
/** The number of sub-blocks within the floor blocks. */
|
||||
public int floorSubBlockCount;
|
||||
|
||||
/** The number of "internal" blocks (that have both
|
||||
* terms and sub-blocks). */
|
||||
public int mixedBlockCount;
|
||||
|
||||
/** The number of "leaf" blocks (blocks that have only
|
||||
* terms). */
|
||||
public int termsOnlyBlockCount;
|
||||
|
||||
/** The number of "internal" blocks that do not contain
|
||||
* terms (have only sub-blocks). */
|
||||
public int subBlocksOnlyBlockCount;
|
||||
|
||||
/** Total number of blocks. */
|
||||
public int totalBlockCount;
|
||||
|
||||
/** Number of blocks at each prefix depth. */
|
||||
public int[] blockCountByPrefixLen = new int[10];
|
||||
private int startBlockCount;
|
||||
private int endBlockCount;
|
||||
|
||||
/** Total number of bytes used to store term suffixes. */
|
||||
public long totalBlockSuffixBytes;
|
||||
|
||||
/** Total number of bytes used to store term stats (not
|
||||
* including what the {@link PostingsBaseFormat}
|
||||
* stores). */
|
||||
public long totalBlockStatsBytes;
|
||||
|
||||
// Postings impl plus the other few vInts stored in
|
||||
// the frame:
|
||||
/** Total bytes stored by the {@link PostingsBaseFormat},
|
||||
* plus the other few vInts stored in the frame. */
|
||||
public long totalBlockOtherBytes;
|
||||
|
||||
/** Segment name. */
|
||||
public final String segment;
|
||||
|
||||
/** Field name. */
|
||||
public final String field;
|
||||
|
||||
public Stats(String segment, String field) {
|
||||
Stats(String segment, String field) {
|
||||
this.segment = segment;
|
||||
this.field = field;
|
||||
}
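The Stats counters documented above are, per the new javadoc, produced by FieldReader#computeStats(). A hedged sketch of how one might print them for a single field; only computeStats() and the public counters come from this hunk, the helper itself is hypothetical:

// Hypothetical helper: prints block-tree statistics for one field.
static void printBlockTreeStats(BlockTreeTermsReader.FieldReader field) throws java.io.IOException {
  BlockTreeTermsReader.Stats stats = field.computeStats();
  System.out.println("terms=" + stats.totalTermCount
      + " termBytes=" + stats.totalTermBytes
      + " blocks=" + stats.totalBlockCount
      + " floorBlocks=" + stats.floorBlockCount
      + " fstNodes=" + stats.indexNodeCount
      + " fstBytes=" + stats.indexNumBytes);
}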
@ -386,6 +426,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
|
|||
final Outputs<BytesRef> fstOutputs = ByteSequenceOutputs.getSingleton();
|
||||
final BytesRef NO_OUTPUT = fstOutputs.getNoOutput();
|
||||
|
||||
/** BlockTree's implementation of {@link Terms}. */
|
||||
public final class FieldReader extends Terms {
|
||||
final long numTerms;
|
||||
final FieldInfo fieldInfo;
|
||||
|
|
|
@ -83,11 +83,18 @@ import org.apache.lucene.util.fst.Util;
|
|||
|
||||
public class BlockTreeTermsWriter extends FieldsConsumer {
|
||||
|
||||
/** Suggested default value for the {@code
|
||||
* minItemsInBlock} parameter to {@link
|
||||
* #BlockTreeTermsWriter(SegmentWriteState,PostingsWriterBase,int,int)}. */
|
||||
public final static int DEFAULT_MIN_BLOCK_SIZE = 25;
|
||||
|
||||
/** Suggested default value for the {@code
|
||||
* maxItemsInBlock} parameter to {@link
|
||||
* #BlockTreeTermsWriter(SegmentWriteState,PostingsWriterBase,int,int)}. */
|
||||
public final static int DEFAULT_MAX_BLOCK_SIZE = 48;
|
||||
|
||||
//public final static boolean DEBUG = false;
|
||||
public final static boolean SAVE_DOT_FILES = false;
|
||||
private final static boolean SAVE_DOT_FILES = false;
|
||||
|
||||
static final int OUTPUT_FLAGS_NUM_BITS = 2;
|
||||
static final int OUTPUT_FLAGS_MASK = 0x3;
|
||||
|
@ -97,15 +104,21 @@ public class BlockTreeTermsWriter extends FieldsConsumer {
|
|||
/** Extension of terms file */
|
||||
static final String TERMS_EXTENSION = "tim";
|
||||
final static String TERMS_CODEC_NAME = "BLOCK_TREE_TERMS_DICT";
|
||||
// Initial format
|
||||
|
||||
/** Initial terms format. */
|
||||
public static final int TERMS_VERSION_START = 0;
|
||||
|
||||
/** Current terms format. */
|
||||
public static final int TERMS_VERSION_CURRENT = TERMS_VERSION_START;
|
||||
|
||||
/** Extension of terms index file */
|
||||
static final String TERMS_INDEX_EXTENSION = "tip";
|
||||
final static String TERMS_INDEX_CODEC_NAME = "BLOCK_TREE_TERMS_INDEX";
|
||||
// Initial format
|
||||
|
||||
/** Initial index format. */
|
||||
public static final int TERMS_INDEX_VERSION_START = 0;
|
||||
|
||||
/** Current index format. */
|
||||
public static final int TERMS_INDEX_VERSION_CURRENT = TERMS_INDEX_VERSION_START;
|
||||
|
||||
private final IndexOutput out;
|
||||
|
@@ -175,21 +188,25 @@ public class BlockTreeTermsWriter extends FieldsConsumer {
    this.indexOut = indexOut;
  }

  /** Writes the terms file header. */
  protected void writeHeader(IndexOutput out) throws IOException {
    CodecUtil.writeHeader(out, TERMS_CODEC_NAME, TERMS_VERSION_CURRENT);
    out.writeLong(0); // leave space for end index pointer
  }

  /** Writes the index file header. */
  protected void writeIndexHeader(IndexOutput out) throws IOException {
    CodecUtil.writeHeader(out, TERMS_INDEX_CODEC_NAME, TERMS_INDEX_VERSION_CURRENT);
    out.writeLong(0); // leave space for end index pointer
  }

  /** Writes the terms file trailer. */
  protected void writeTrailer(IndexOutput out, long dirStart) throws IOException {
    out.seek(CodecUtil.headerLength(TERMS_CODEC_NAME));
    out.writeLong(dirStart);
  }

  /** Writes the index file trailer. */
  protected void writeIndexTrailer(IndexOutput indexOut, long dirStart) throws IOException {
    indexOut.seek(CodecUtil.headerLength(TERMS_INDEX_CODEC_NAME));
    indexOut.writeLong(dirStart);
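The header/trailer pair above reserves an 8-byte slot right after the codec header, then seeks back and patches it with the real directory offset once all terms have been written. A minimal self-contained sketch of that pattern using plain java.io instead of Lucene's IndexOutput/IndexInput:

import java.io.IOException;
import java.io.RandomAccessFile;

public class DirPointerDemo {
  public static void main(String[] args) throws IOException {
    try (RandomAccessFile out = new RandomAccessFile("demo.bin", "rw")) {
      out.writeInt(0xC0DEC);           // toy "codec header"
      long dirSlot = out.getFilePointer();
      out.writeLong(0);                // leave space for end index pointer
      out.writeUTF("term data goes here");   // ... the actual data ...
      long dirStart = out.getFilePointer();
      out.writeUTF("directory goes here");
      out.seek(dirSlot);               // patch the placeholder with the real offset
      out.writeLong(dirStart);
    }

    try (RandomAccessFile in = new RandomAccessFile("demo.bin", "r")) {
      in.readInt();                    // check header
      long dirStart = in.readLong();   // read the patched pointer
      in.seek(dirStart);               // jump straight to the directory
      System.out.println(in.readUTF());
    }
  }
}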
@ -51,10 +51,13 @@ public abstract class DocValuesArraySource extends Source {
|
|||
TEMPLATES = Collections.unmodifiableMap(templates);
|
||||
}
|
||||
|
||||
/** Returns the {@link DocValuesArraySource} for the given
|
||||
* {@link Type}. */
|
||||
public static DocValuesArraySource forType(Type type) {
|
||||
return TEMPLATES.get(type);
|
||||
}
|
||||
|
||||
/** Number of bytes to encode each doc value. */
|
||||
protected final int bytesPerValue;
|
||||
|
||||
DocValuesArraySource(int bytesPerValue, Type type) {
|
||||
|
@ -66,9 +69,13 @@ public abstract class DocValuesArraySource extends Source {
|
|||
public abstract BytesRef getBytes(int docID, BytesRef ref);
|
||||
|
||||
|
||||
/** Creates a {@link DocValuesArraySource} by loading a
|
||||
* previously saved one from an {@link IndexInput}. */
|
||||
public abstract DocValuesArraySource newFromInput(IndexInput input, int numDocs)
|
||||
throws IOException;
|
||||
|
||||
/** Creates {@link DocValuesArraySource} from a native
|
||||
* array. */
|
||||
public abstract DocValuesArraySource newFromArray(Object array);
|
||||
|
||||
@Override
|
||||
|
@@ -76,10 +83,14 @@ public abstract class DocValuesArraySource extends Source {
    return true;
  }

  /** Encode a long value into the provided {@link
   * BytesRef}. */
  public void toBytes(long value, BytesRef bytesRef) {
    copyLong(bytesRef, value);
  }

  /** Encode a double value into the provided {@link
   * BytesRef}. */
  public void toBytes(double value, BytesRef bytesRef) {
    copyLong(bytesRef, Double.doubleToRawLongBits(value));
  }
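As the double overload above shows, a double doc value is stored as the raw bit pattern of its IEEE-754 representation, so encoding and decoding are exact. A small standalone illustration of that round trip, plain Java with no Lucene types:

public class DoubleBitsDemo {
  public static void main(String[] args) {
    double value = 3.14159;
    long bits = Double.doubleToRawLongBits(value);   // encode: 8 bytes, no precision loss
    double back = Double.longBitsToDouble(bits);     // decode: identical bit pattern
    System.out.println(bits + " -> " + back);        // prints the raw bits and 3.14159
    assert back == value;
  }
}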
@ -51,9 +51,17 @@ import org.apache.lucene.util.BytesRef;
|
|||
*/
|
||||
public abstract class DocValuesConsumer {
|
||||
|
||||
/** Spare {@link BytesRef} that subclasses can reuse. */
|
||||
protected final BytesRef spare = new BytesRef();
|
||||
|
||||
/** Returns the {@link Type} of this consumer. */
|
||||
protected abstract Type getType();
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected DocValuesConsumer() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given {@link StorableField} instance to this
|
||||
* {@link DocValuesConsumer}
|
||||
|
|
|
@ -28,6 +28,10 @@ import org.apache.lucene.index.SegmentReadState;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class DocValuesFormat {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected DocValuesFormat() {
|
||||
}
|
||||
|
||||
/** Consumes (writes) doc values during indexing. */
|
||||
public abstract PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException;
|
||||
|
|
|
@ -26,6 +26,11 @@ import org.apache.lucene.index.FieldInfos; // javadocs
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class FieldInfosFormat {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected FieldInfosFormat() {
|
||||
}
|
||||
|
||||
/** Returns a {@link FieldInfosReader} to read field infos
|
||||
* from the index */
|
||||
public abstract FieldInfosReader getFieldInfosReader() throws IOException;
|
||||
|
|
|
@ -28,5 +28,12 @@ import org.apache.lucene.store.IOContext;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class FieldInfosReader {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected FieldInfosReader() {
|
||||
}
|
||||
|
||||
/** Read the {@link FieldInfos} previously written with {@link
|
||||
* FieldInfosWriter}. */
|
||||
public abstract FieldInfos read(Directory directory, String segmentName, IOContext iocontext) throws IOException;
|
||||
}
|
||||
|
|
|
@ -28,5 +28,12 @@ import org.apache.lucene.store.IOContext;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class FieldInfosWriter {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected FieldInfosWriter() {
|
||||
}
|
||||
|
||||
/** Writes the provided {@link FieldInfos} to the
|
||||
* directory. */
|
||||
public abstract void write(Directory directory, String segmentName, FieldInfos infos, IOContext context) throws IOException;
|
||||
}
|
||||
|
|
|
@ -45,12 +45,22 @@ import org.apache.lucene.index.Terms;
|
|||
*/
|
||||
public abstract class FieldsConsumer implements Closeable {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected FieldsConsumer() {
|
||||
}
|
||||
|
||||
/** Add a new field */
|
||||
public abstract TermsConsumer addField(FieldInfo field) throws IOException;
|
||||
|
||||
/** Called when we are done adding everything. */
|
||||
public abstract void close() throws IOException;
|
||||
|
||||
/** Called during merging to merge all {@link Fields} from
|
||||
* sub-readers. This must recurse to merge all postings
|
||||
* (terms, docs, positions, etc.). A {@link
|
||||
* PostingsFormat} can override this default
|
||||
* implementation to do its own merging. */
|
||||
public void merge(MergeState mergeState, Fields fields) throws IOException {
|
||||
for (String field : fields) {
|
||||
mergeState.fieldInfo = mergeState.fieldInfos.fieldInfo(field);
|
||||
|
|
|
@ -29,5 +29,11 @@ import org.apache.lucene.index.Fields;
|
|||
*/
|
||||
|
||||
public abstract class FieldsProducer extends Fields implements Closeable {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected FieldsProducer() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract void close() throws IOException;
|
||||
}
|
||||
|
|
|
@ -43,6 +43,7 @@ package org.apache.lucene.codecs;
|
|||
*/
|
||||
public abstract class FilterCodec extends Codec {
|
||||
|
||||
/** Sole constructor. */
|
||||
public FilterCodec(String name) {
|
||||
super(name);
|
||||
}
|
||||
|
|
|
@ -29,6 +29,12 @@ import org.apache.lucene.util.MutableBits;
|
|||
/** Format for live/deleted documents
|
||||
* @lucene.experimental */
|
||||
public abstract class LiveDocsFormat {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected LiveDocsFormat() {
|
||||
}
|
||||
|
||||
/** Creates a new MutableBits, with all bits set, for the specified size. */
|
||||
public abstract MutableBits newLiveDocs(int size) throws IOException;
|
||||
|
||||
|
|
|
@ -42,6 +42,10 @@ public final class MappingMultiDocsAndPositionsEnum extends DocsAndPositionsEnum
|
|||
int doc = -1;
|
||||
private MergeState mergeState;
|
||||
|
||||
/** Sole constructor. */
|
||||
public MappingMultiDocsAndPositionsEnum() {
|
||||
}
|
||||
|
||||
MappingMultiDocsAndPositionsEnum reset(MultiDocsAndPositionsEnum postingsEnum) {
|
||||
this.numSubs = postingsEnum.getNumSubs();
|
||||
this.subs = postingsEnum.getSubs();
|
||||
|
@ -50,14 +54,19 @@ public final class MappingMultiDocsAndPositionsEnum extends DocsAndPositionsEnum
|
|||
return this;
|
||||
}
|
||||
|
||||
/** Sets the {@link MergeState}, which is used to re-map
|
||||
* document IDs. */
|
||||
public void setMergeState(MergeState mergeState) {
|
||||
this.mergeState = mergeState;
|
||||
}
|
||||
|
||||
/** How many sub-readers we are merging.
|
||||
* @see #getSubs */
|
||||
public int getNumSubs() {
|
||||
return numSubs;
|
||||
}
|
||||
|
||||
/** Returns sub-readers we are merging. */
|
||||
public EnumWithSlice[] getSubs() {
|
||||
return subs;
|
||||
}
|
||||
|
|
|
@ -41,6 +41,10 @@ public final class MappingMultiDocsEnum extends DocsEnum {
|
|||
int doc = -1;
|
||||
private MergeState mergeState;
|
||||
|
||||
/** Sole constructor. */
|
||||
public MappingMultiDocsEnum() {
|
||||
}
|
||||
|
||||
MappingMultiDocsEnum reset(MultiDocsEnum docsEnum) {
|
||||
this.numSubs = docsEnum.getNumSubs();
|
||||
this.subs = docsEnum.getSubs();
|
||||
|
@ -49,14 +53,19 @@ public final class MappingMultiDocsEnum extends DocsEnum {
|
|||
return this;
|
||||
}
|
||||
|
||||
/** Sets the {@link MergeState}, which is used to re-map
|
||||
* document IDs. */
|
||||
public void setMergeState(MergeState mergeState) {
|
||||
this.mergeState = mergeState;
|
||||
}
|
||||
|
||||
/** How many sub-readers we are merging.
|
||||
* @see #getSubs */
|
||||
public int getNumSubs() {
|
||||
return numSubs;
|
||||
}
|
||||
|
||||
/** Returns sub-readers we are merging. */
|
||||
public EnumWithSlice[] getSubs() {
|
||||
return subs;
|
||||
}
|
||||
|
|
|
@ -17,6 +17,7 @@ package org.apache.lucene.codecs;
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
||||
|
@ -35,7 +36,7 @@ import org.apache.lucene.util.MathUtil;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
|
||||
public abstract class MultiLevelSkipListReader {
|
||||
public abstract class MultiLevelSkipListReader implements Closeable {
|
||||
/** the maximum number of skip levels possible for this index */
|
||||
protected int maxNumberOfSkipLevels;
|
||||
|
||||
|
@ -54,19 +55,35 @@ public abstract class MultiLevelSkipListReader {
|
|||
private int docCount;
|
||||
private boolean haveSkipped;
|
||||
|
||||
private IndexInput[] skipStream; // skipStream for each level
|
||||
private long skipPointer[]; // the start pointer of each skip level
|
||||
private int skipInterval[]; // skipInterval of each level
|
||||
private int[] numSkipped; // number of docs skipped per level
|
||||
/** skipStream for each level. */
|
||||
private IndexInput[] skipStream;
|
||||
|
||||
protected int[] skipDoc; // doc id of current skip entry per level
|
||||
private int lastDoc; // doc id of last read skip entry with docId <= target
|
||||
private long[] childPointer; // child pointer of current skip entry per level
|
||||
private long lastChildPointer; // childPointer of last read skip entry with docId <= target
|
||||
/** The start pointer of each skip level. */
|
||||
private long skipPointer[];
|
||||
|
||||
/** skipInterval of each level. */
|
||||
private int skipInterval[];
|
||||
|
||||
/** Number of docs skipped per level. */
|
||||
private int[] numSkipped;
|
||||
|
||||
/** Doc id of current skip entry per level. */
|
||||
protected int[] skipDoc;
|
||||
|
||||
/** Doc id of last read skip entry with docId <= target. */
|
||||
private int lastDoc;
|
||||
|
||||
/** Child pointer of current skip entry per level. */
|
||||
private long[] childPointer;
|
||||
|
||||
/** childPointer of last read skip entry with docId <=
|
||||
* target. */
|
||||
private long lastChildPointer;
|
||||
|
||||
private boolean inputIsBuffered;
|
||||
private final int skipMultiplier;
|
||||
|
||||
/** Creates a {@code MultiLevelSkipListReader}. */
|
||||
protected MultiLevelSkipListReader(IndexInput skipStream, int maxSkipLevels, int skipInterval, int skipMultiplier) {
|
||||
this.skipStream = new IndexInput[maxSkipLevels];
|
||||
this.skipPointer = new long[maxSkipLevels];
|
||||
|
@ -85,7 +102,9 @@ public abstract class MultiLevelSkipListReader {
|
|||
skipDoc = new int[maxSkipLevels];
|
||||
}
|
||||
|
||||
// skipMultiplier and skipInterval are the same:
|
||||
/** Creates a {@code MultiLevelSkipListReader}, where
|
||||
* {@code skipInterval} and {@code skipMultiplier} are
|
||||
* the same. */
|
||||
protected MultiLevelSkipListReader(IndexInput skipStream, int maxSkipLevels, int skipInterval) {
|
||||
this(skipStream, maxSkipLevels, skipInterval, skipInterval);
|
||||
}
|
||||
|
@ -167,6 +186,7 @@ public abstract class MultiLevelSkipListReader {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
for (int i = 1; i < skipStream.length; i++) {
|
||||
if (skipStream[i] != null) {
|
||||
|
@ -175,7 +195,7 @@ public abstract class MultiLevelSkipListReader {
|
|||
}
|
||||
}
|
||||
|
||||
/** initializes the reader */
|
||||
/** Initializes the reader, for reuse on a new term. */
|
||||
public void init(long skipPointer, int df) {
|
||||
this.skipPointer[0] = skipPointer;
|
||||
this.docCount = df;
|
||||
|
|
|
@ -55,15 +55,16 @@ public abstract class MultiLevelSkipListWriter {
|
|||
/** number of levels in this skip list */
|
||||
protected int numberOfSkipLevels;
|
||||
|
||||
// the skip interval in the list with level = 0
|
||||
/** the skip interval in the list with level = 0 */
|
||||
private int skipInterval;
|
||||
|
||||
// skipInterval used for level > 0
|
||||
/** skipInterval used for level > 0 */
|
||||
private int skipMultiplier;
|
||||
|
||||
// for every skip level a different buffer is used
|
||||
/** for every skip level a different buffer is used */
|
||||
private RAMOutputStream[] skipBuffer;
|
||||
|
||||
/** Creates a {@code MultiLevelSkipListWriter}. */
|
||||
protected MultiLevelSkipListWriter(int skipInterval, int skipMultiplier, int maxSkipLevels, int df) {
|
||||
this.skipInterval = skipInterval;
|
||||
this.skipMultiplier = skipMultiplier;
|
||||
|
@ -81,11 +82,14 @@ public abstract class MultiLevelSkipListWriter {
|
|||
}
|
||||
}
|
||||
|
||||
// skipMultiplier and skipInterval are the same:
|
||||
/** Creates a {@code MultiLevelSkipListWriter}, where
|
||||
* {@code skipInterval} and {@code skipMultiplier} are
|
||||
* the same. */
|
||||
protected MultiLevelSkipListWriter(int skipInterval, int maxSkipLevels, int df) {
|
||||
this(skipInterval, skipInterval, maxSkipLevels, df);
|
||||
}
|
||||
|
||||
/** Allocates internal skip buffers. */
|
||||
protected void init() {
|
||||
skipBuffer = new RAMOutputStream[numberOfSkipLevels];
|
||||
for (int i = 0; i < numberOfSkipLevels; i++) {
|
||||
|
@ -93,7 +97,7 @@ public abstract class MultiLevelSkipListWriter {
|
|||
}
|
||||
}
|
||||
|
||||
/** creates new buffers or empties the existing ones */
|
||||
/** Creates new buffers or empties the existing ones */
|
||||
protected void resetSkip() {
|
||||
if (skipBuffer == null) {
|
||||
init();
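The writer above keeps one buffer per skip level, with skipInterval controlling level 0 and skipMultiplier controlling how much sparser each higher level gets. A rough back-of-the-envelope sketch, assuming one entry every skipInterval docs on level 0 and skipMultiplier times fewer on each level above (that spacing is an assumption about the parameters, not taken from this diff):

public class SkipLevelsDemo {
  public static void main(String[] args) {
    int df = 100_000;        // docs containing the term
    int skipInterval = 16;   // level-0 spacing
    int skipMultiplier = 8;  // spacing growth per level
    long spacing = skipInterval;
    for (int level = 0; df / spacing > 0; level++) {
      System.out.println("level " + level + ": ~" + (df / spacing) + " skip entries");
      spacing *= skipMultiplier;
    }
  }
}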
@ -26,6 +26,10 @@ import org.apache.lucene.index.SegmentReadState;
|
|||
* format for normalization factors
|
||||
*/
|
||||
public abstract class NormsFormat {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected NormsFormat() {
|
||||
}
|
||||
|
||||
/** Returns a {@link PerDocConsumer} to write norms to the
|
||||
* index. */
|
||||
|
|
|
@ -35,6 +35,11 @@ import org.apache.lucene.index.DocValues.Type;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class PerDocConsumer implements Closeable {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected PerDocConsumer() {
|
||||
}
|
||||
|
||||
/** Adds a new DocValuesField */
|
||||
public abstract DocValuesConsumer addValuesField(DocValues.Type type, FieldInfo field)
|
||||
throws IOException;
|
||||
|
@ -104,4 +109,7 @@ public abstract class PerDocConsumer implements Closeable {
|
|||
* This method should cleanup all resources.
|
||||
*/
|
||||
public abstract void abort();
|
||||
|
||||
@Override
|
||||
public abstract void close() throws IOException;
|
||||
}
|
||||
|
|
|
@ -33,6 +33,11 @@ import org.apache.lucene.index.DocValues;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class PerDocProducer implements Closeable {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected PerDocProducer() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@link DocValues} for the current field.
|
||||
*
|
||||
|
@ -43,4 +48,7 @@ public abstract class PerDocProducer implements Closeable {
|
|||
* @throws IOException
|
||||
*/
|
||||
public abstract DocValues docValues(String field) throws IOException;
|
||||
|
||||
@Override
|
||||
public abstract void close() throws IOException;
|
||||
}
|
||||
|
|
|
@ -38,9 +38,17 @@ import org.apache.lucene.util.BytesRef;
|
|||
*/
|
||||
public abstract class PerDocProducerBase extends PerDocProducer {
|
||||
|
||||
/** Closes the provided Closeables. */
|
||||
protected abstract void closeInternal(Collection<? extends Closeable> closeables) throws IOException;
|
||||
|
||||
/** Returns a map, mapping field names to doc values. */
|
||||
protected abstract Map<String, DocValues> docValues();
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected PerDocProducerBase() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
closeInternal(docValues().values());
|
||||
|
@ -51,11 +59,12 @@ public abstract class PerDocProducerBase extends PerDocProducer {
|
|||
return docValues().get(field);
|
||||
}
|
||||
|
||||
/** Returns the comparator used to sort {@link BytesRef} values. */
|
||||
public Comparator<BytesRef> getComparator() throws IOException {
|
||||
return BytesRef.getUTF8SortedAsUnicodeComparator();
|
||||
}
|
||||
|
||||
// Only opens files... doesn't actually load any values
|
||||
/** Only opens files... doesn't actually load any values. */
|
||||
protected TreeMap<String, DocValues> load(FieldInfos fieldInfos,
|
||||
String segment, int docCount, Directory dir, IOContext context)
|
||||
throws IOException {
|
||||
|
@ -82,18 +91,23 @@ public abstract class PerDocProducerBase extends PerDocProducer {
|
|||
return values;
|
||||
}
|
||||
|
||||
/** Returns true if this field indexed doc values. */
|
||||
protected boolean canLoad(FieldInfo info) {
|
||||
return info.hasDocValues();
|
||||
}
|
||||
|
||||
/** Returns the doc values type for this field. */
|
||||
protected Type getDocValuesType(FieldInfo info) {
|
||||
return info.getDocValuesType();
|
||||
}
|
||||
|
||||
/** Returns true if any fields indexed doc values. */
|
||||
protected boolean anyDocValuesFields(FieldInfos infos) {
|
||||
return infos.hasDocValues();
|
||||
}
|
||||
|
||||
/** Returns the unique segment and field id for any
|
||||
* per-field files this implementation needs to write. */
|
||||
public static String docValuesId(String segmentsName, int fieldId) {
|
||||
return segmentsName + "_" + fieldId;
|
||||
}
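docValuesId above simply joins the segment name and field number with an underscore, giving the per-field file id used when writing doc values. For example, with a typical segment name like "_0":

String id = PerDocProducerBase.docValuesId("_0", 3);  // -> "_0_3"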
@ -40,11 +40,16 @@ public abstract class PostingsBaseFormat {
|
|||
* reading the index */
|
||||
public final String name;
|
||||
|
||||
/** Sole constructor. */
|
||||
protected PostingsBaseFormat(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/** Creates the {@link PostingsReaderBase} for this
|
||||
* format. */
|
||||
public abstract PostingsReaderBase postingsReaderBase(SegmentReadState state) throws IOException;
|
||||
|
||||
/** Creates the {@link PostingsWriterBase} for this
|
||||
* format. */
|
||||
public abstract PostingsWriterBase postingsWriterBase(SegmentWriteState state) throws IOException;
|
||||
}
|
||||
|
|
|
@ -49,6 +49,11 @@ import org.apache.lucene.util.FixedBitSet;
|
|||
*/
|
||||
public abstract class PostingsConsumer {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected PostingsConsumer() {
|
||||
}
|
||||
|
||||
/** Adds a new doc in this term.
|
||||
* <code>freq</code> will be -1 when term frequencies are omitted
|
||||
* for the field. */
|
||||
|
|
|
@ -42,7 +42,9 @@ public abstract class PostingsFormat implements NamedSPILoader.NamedSPI {
|
|||
private static final NamedSPILoader<PostingsFormat> loader =
|
||||
new NamedSPILoader<PostingsFormat>(PostingsFormat.class);
|
||||
|
||||
/** Zero-length {@code PostingsFormat} array. */
|
||||
public static final PostingsFormat[] EMPTY = new PostingsFormat[0];
|
||||
|
||||
/** Unique name that's used to retrieve this format when
|
||||
* reading the index.
|
||||
*/
|
||||
|
|
|
@ -41,6 +41,14 @@ import org.apache.lucene.util.Bits;
|
|||
// TermsDict + PostingsReader/WriterBase == PostingsConsumer/Producer
|
||||
public abstract class PostingsReaderBase implements Closeable {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected PostingsReaderBase() {
|
||||
}
|
||||
|
||||
/** Performs any initialization, such as reading and
|
||||
* verifying the header from the provided terms
|
||||
* dictionary {@link IndexInput}. */
|
||||
public abstract void init(IndexInput termsIn) throws IOException;
|
||||
|
||||
/** Return a newly created empty TermState */
|
||||
|
|
|
@ -40,8 +40,19 @@ import org.apache.lucene.index.FieldInfo;
|
|||
// TermsDict + PostingsReader/WriterBase == PostingsConsumer/Producer
|
||||
public abstract class PostingsWriterBase extends PostingsConsumer implements Closeable {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected PostingsWriterBase() {
|
||||
}
|
||||
|
||||
/** Called once after startup, before any terms have been
|
||||
* added. Implementations typically write a header to
|
||||
* the provided {@code termsOut}. */
|
||||
public abstract void start(IndexOutput termsOut) throws IOException;
|
||||
|
||||
/** Start a new term. Note that a matching call to {@link
|
||||
* #finishTerm(TermStats)} is done, only if the term has at least one
|
||||
* document. */
|
||||
public abstract void startTerm() throws IOException;
|
||||
|
||||
/** Flush count terms starting at start "backwards", as a
|
||||
|
@ -50,10 +61,13 @@ public abstract class PostingsWriterBase extends PostingsConsumer implements Clo
|
|||
* the stack. */
|
||||
public abstract void flushTermsBlock(int start, int count) throws IOException;
|
||||
|
||||
/** Finishes the current term */
|
||||
/** Finishes the current term. The provided {@link
|
||||
* TermStats} contains the term's summary statistics. */
|
||||
public abstract void finishTerm(TermStats stats) throws IOException;
|
||||
|
||||
/** Called when the writing switches to another field. */
|
||||
public abstract void setField(FieldInfo fieldInfo);
|
||||
|
||||
@Override
|
||||
public abstract void close() throws IOException;
|
||||
}
|
||||
|
|
|
@ -28,6 +28,16 @@ import org.apache.lucene.index.SegmentInfo;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class SegmentInfoFormat {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected SegmentInfoFormat() {
|
||||
}
|
||||
|
||||
/** Returns the {@link SegmentInfoReader} for reading
|
||||
* {@link SegmentInfo} instances. */
|
||||
public abstract SegmentInfoReader getSegmentInfoReader();
|
||||
|
||||
/** Returns the {@link SegmentInfoWriter} for writing
|
||||
* {@link SegmentInfo} instances. */
|
||||
public abstract SegmentInfoWriter getSegmentInfoWriter();
|
||||
}
|
||||
|
|
|
@ -30,6 +30,11 @@ import org.apache.lucene.store.IOContext;
|
|||
|
||||
public abstract class SegmentInfoReader {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected SegmentInfoReader() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Read {@link SegmentInfo} data from a directory.
|
||||
* @param directory directory to read from
|
||||
|
|
|
@ -30,6 +30,10 @@ import org.apache.lucene.store.IOContext;
|
|||
*/
|
||||
|
||||
public abstract class SegmentInfoWriter {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected SegmentInfoWriter() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Write {@link SegmentInfo} data.
|
||||
|
|
|
@ -28,6 +28,11 @@ import org.apache.lucene.store.IOContext;
|
|||
* Controls the format of stored fields
|
||||
*/
|
||||
public abstract class StoredFieldsFormat {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected StoredFieldsFormat() {
|
||||
}
|
||||
|
||||
/** Returns a {@link StoredFieldsReader} to load stored
|
||||
* fields. */
|
||||
public abstract StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) throws IOException;
|
||||
|
|
|
@ -30,6 +30,10 @@ import org.apache.lucene.index.StoredFieldVisitor;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class StoredFieldsReader implements Cloneable, Closeable {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected StoredFieldsReader() {
|
||||
}
|
||||
|
||||
/** Visit the stored fields for document <code>n</code> */
|
||||
public abstract void visitDocument(int n, StoredFieldVisitor visitor) throws IOException;
|
||||
|
|
|
@ -44,6 +44,11 @@ import org.apache.lucene.index.AtomicReader;
|
|||
*/
|
||||
public abstract class StoredFieldsWriter implements Closeable {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected StoredFieldsWriter() {
|
||||
}
|
||||
|
||||
/** Called before writing the stored fields of the document.
|
||||
* {@link #writeField(FieldInfo, StorableField)} will be called
|
||||
* <code>numStoredFields</code> times. Note that this is
|
||||
|
@ -112,4 +117,6 @@ public abstract class StoredFieldsWriter implements Closeable {
|
|||
writeField(fieldInfos.fieldInfo(field.name()), field);
|
||||
}
|
||||
}
|
||||
|
||||
public abstract void close() throws IOException;
|
||||
}
|
||||
|
|
|
@ -26,9 +26,15 @@ import org.apache.lucene.index.TermsEnum; // javadocs
|
|||
* @see TermsEnum#totalTermFreq
|
||||
*/
|
||||
public class TermStats {
|
||||
/** How many documents have at least one occurrence of
|
||||
* this term. */
|
||||
public final int docFreq;
|
||||
|
||||
/** Total number of times this term occurs across all
|
||||
* documents in the field. */
|
||||
public final long totalTermFreq;
|
||||
|
||||
/** Sole constructor. */
|
||||
public TermStats(int docFreq, long totalTermFreq) {
|
||||
this.docFreq = docFreq;
|
||||
this.totalTermFreq = totalTermFreq;
|
||||
|
|
|
@ -28,6 +28,11 @@ import org.apache.lucene.store.IOContext;
|
|||
* Controls the format of term vectors
|
||||
*/
|
||||
public abstract class TermVectorsFormat {
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected TermVectorsFormat() {
|
||||
}
|
||||
|
||||
/** Returns a {@link TermVectorsReader} to read term
|
||||
* vectors. */
|
||||
public abstract TermVectorsReader vectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context) throws IOException;
|
||||
|
|
|
@ -29,7 +29,12 @@ import org.apache.lucene.index.Fields;
|
|||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class TermVectorsReader implements Cloneable,Closeable {
|
||||
public abstract class TermVectorsReader implements Cloneable, Closeable {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected TermVectorsReader() {
|
||||
}
|
||||
|
||||
/** Returns term vectors for this document, or null if
|
||||
* term vectors were not indexed. If offsets are
|
||||
|
|
|
@ -59,6 +59,11 @@ import org.apache.lucene.util.BytesRef;
|
|||
*/
|
||||
public abstract class TermVectorsWriter implements Closeable {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected TermVectorsWriter() {
|
||||
}
|
||||
|
||||
/** Called before writing the term vectors of the document.
|
||||
* {@link #startField(FieldInfo, int, boolean, boolean, boolean)} will
|
||||
* be called <code>numVectorFields</code> times. Note that if term
|
||||
|
@ -279,4 +284,6 @@ public abstract class TermVectorsWriter implements Closeable {
|
|||
/** Return the BytesRef Comparator used to sort terms
|
||||
* before feeding to this API. */
|
||||
public abstract Comparator<BytesRef> getComparator() throws IOException;
|
||||
|
||||
public abstract void close() throws IOException;
|
||||
}
|
||||
|
|
|
@ -53,6 +53,11 @@ import org.apache.lucene.util.FixedBitSet;
|
|||
*/
|
||||
public abstract class TermsConsumer {
|
||||
|
||||
/** Sole constructor. (For invocation by subclass
|
||||
* constructors, typically implicit.) */
|
||||
protected TermsConsumer() {
|
||||
}
|
||||
|
||||
/** Starts a new term in this field; this may be called
|
||||
* with no corresponding call to finish if the term had
|
||||
* no docs. */
|
||||
|
|
|
@ -57,6 +57,7 @@ public class Lucene40Codec extends Codec {
|
|||
}
|
||||
};
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40Codec() {
|
||||
super("Lucene40");
|
||||
}
|
||||
|
|
|
@ -36,9 +36,11 @@ public class Lucene40DocValuesConsumer extends DocValuesWriterBase {
|
|||
private final Directory mainDirectory;
|
||||
private Directory directory;
|
||||
private final String segmentSuffix;
|
||||
|
||||
/** Segment suffix used when writing doc values index files. */
|
||||
public final static String DOC_VALUES_SEGMENT_SUFFIX = "dv";
|
||||
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40DocValuesConsumer(PerDocWriteState state, String segmentSuffix) {
|
||||
super(state);
|
||||
this.segmentSuffix = segmentSuffix;
|
||||
|
|
|
@ -130,6 +130,10 @@ import org.apache.lucene.util.packed.PackedInts; // javadocs
|
|||
*/
|
||||
public class Lucene40DocValuesFormat extends DocValuesFormat {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40DocValuesFormat() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
|
||||
return new Lucene40DocValuesConsumer(state, Lucene40DocValuesConsumer.DOC_VALUES_SEGMENT_SUFFIX);
|
||||
|
|
|
@ -44,6 +44,7 @@ import org.apache.lucene.util.IOUtils;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public class Lucene40DocValuesProducer extends PerDocProducerBase {
|
||||
/** Maps field name to {@link DocValues} instance. */
|
||||
protected final TreeMap<String,DocValues> docValues;
|
||||
private final Directory cfs;
|
||||
/**
|
||||
|
|
|
@ -99,6 +99,10 @@ public class Lucene40FieldInfosFormat extends FieldInfosFormat {
|
|||
private final FieldInfosReader reader = new Lucene40FieldInfosReader();
|
||||
private final FieldInfosWriter writer = new Lucene40FieldInfosWriter();
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40FieldInfosFormat() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldInfosReader getFieldInfosReader() throws IOException {
|
||||
return reader;
|
||||
|
|
|
@ -1,21 +1,5 @@
|
|||
package org.apache.lucene.codecs.lucene40;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
import org.apache.lucene.codecs.FieldInfosReader;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.apache.lucene.index.FieldInfo;
|
||||
import org.apache.lucene.index.FieldInfos;
|
||||
import org.apache.lucene.index.IndexFileNames;
|
||||
import org.apache.lucene.index.FieldInfo.IndexOptions;
|
||||
import org.apache.lucene.index.DocValues;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.IndexInput;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
|
@ -33,6 +17,22 @@ import org.apache.lucene.store.IndexInput;
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
import org.apache.lucene.codecs.FieldInfosReader;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.apache.lucene.index.FieldInfo;
|
||||
import org.apache.lucene.index.FieldInfos;
|
||||
import org.apache.lucene.index.IndexFileNames;
|
||||
import org.apache.lucene.index.FieldInfo.IndexOptions;
|
||||
import org.apache.lucene.index.DocValues;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.IndexInput;
|
||||
|
||||
/**
|
||||
* Lucene 4.0 FieldInfos reader.
|
||||
*
|
||||
|
@ -41,6 +41,10 @@ import org.apache.lucene.store.IndexInput;
|
|||
*/
|
||||
public class Lucene40FieldInfosReader extends FieldInfosReader {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40FieldInfosReader() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldInfos read(Directory directory, String segmentName, IOContext iocontext) throws IOException {
|
||||
final String fileName = IndexFileNames.segmentFileName(segmentName, "", Lucene40FieldInfosWriter.FIELD_INFOS_EXTENSION);
|
||||
|
|
|
@ -21,10 +21,10 @@ import java.io.IOException;
|
|||
import org.apache.lucene.codecs.CodecUtil;
|
||||
import org.apache.lucene.codecs.FieldInfosWriter;
|
||||
import org.apache.lucene.index.DocValues.Type;
|
||||
import org.apache.lucene.index.FieldInfo.IndexOptions;
|
||||
import org.apache.lucene.index.FieldInfo;
|
||||
import org.apache.lucene.index.FieldInfos;
|
||||
import org.apache.lucene.index.IndexFileNames;
|
||||
import org.apache.lucene.index.FieldInfo.IndexOptions;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.IndexOutput;
|
||||
|
@ -52,6 +52,10 @@ public class Lucene40FieldInfosWriter extends FieldInfosWriter {
|
|||
static final byte OMIT_TERM_FREQ_AND_POSITIONS = 0x40;
|
||||
static final byte OMIT_POSITIONS = -128;
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40FieldInfosWriter() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(Directory directory, String segmentName, FieldInfos infos, IOContext context) throws IOException {
|
||||
final String fileName = IndexFileNames.segmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
|
||||
|
@ -93,6 +97,8 @@ public class Lucene40FieldInfosWriter extends FieldInfosWriter {
|
|||
}
|
||||
}
|
||||
|
||||
/** Returns the byte used to encode the {@link
|
||||
* Type} for each field. */
|
||||
public byte docValuesByte(Type type) {
|
||||
if (type == null) {
|
||||
return 0;
|
||||
|
|
|
@ -67,6 +67,10 @@ public class Lucene40LiveDocsFormat extends LiveDocsFormat {
|
|||
/** Extension of deletes */
|
||||
static final String DELETES_EXTENSION = "del";
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40LiveDocsFormat() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public MutableBits newLiveDocs(int size) throws IOException {
|
||||
BitVector bitVector = new BitVector(size);
|
||||
|
|
|
@ -48,6 +48,10 @@ import org.apache.lucene.store.CompoundFileDirectory; // javadocs
|
|||
public class Lucene40NormsFormat extends NormsFormat {
|
||||
private final static String NORMS_SEGMENT_SUFFIX = "nrm";
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40NormsFormat() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
|
||||
return new Lucene40NormsDocValuesConsumer(state, NORMS_SEGMENT_SUFFIX);
|
||||
|
@ -65,6 +69,7 @@ public class Lucene40NormsFormat extends NormsFormat {
|
|||
*/
|
||||
public static class Lucene40NormsDocValuesProducer extends Lucene40DocValuesProducer {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40NormsDocValuesProducer(SegmentReadState state,
|
||||
String segmentSuffix) throws IOException {
|
||||
super(state, segmentSuffix);
|
||||
|
@ -95,6 +100,7 @@ public class Lucene40NormsFormat extends NormsFormat {
|
|||
*/
|
||||
public static class Lucene40NormsDocValuesConsumer extends Lucene40DocValuesConsumer {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40NormsDocValuesConsumer(PerDocWriteState state,
|
||||
String segmentSuffix) {
|
||||
super(state, segmentSuffix);
|
||||
|
|
|
@ -34,6 +34,7 @@ import org.apache.lucene.index.SegmentWriteState;
|
|||
// TODO: should these also be named / looked up via SPI?
|
||||
public final class Lucene40PostingsBaseFormat extends PostingsBaseFormat {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40PostingsBaseFormat() {
|
||||
super("Lucene40");
|
||||
}
|
||||
|
|
|
@ -274,10 +274,16 @@ public final class Lucene40PostingsFormat extends PostingsFormat {
|
|||
private final int minBlockSize;
|
||||
private final int maxBlockSize;
|
||||
|
||||
/** Creates {@code Lucene40PostingsFormat} with default
|
||||
* settings. */
|
||||
public Lucene40PostingsFormat() {
|
||||
this(BlockTreeTermsWriter.DEFAULT_MIN_BLOCK_SIZE, BlockTreeTermsWriter.DEFAULT_MAX_BLOCK_SIZE);
|
||||
}
|
||||
|
||||
/** Creates {@code Lucene40PostingsFormat} with custom
|
||||
* values for {@code minBlockSize} and {@code
|
||||
* maxBlockSize} passed to block terms dictionary.
|
||||
* @see BlockTreeTermsWriter#BlockTreeTermsWriter(SegmentWriteState,PostingsWriterBase,int,int) */
|
||||
public Lucene40PostingsFormat(int minBlockSize, int maxBlockSize) {
|
||||
super("Lucene40");
|
||||
this.minBlockSize = minBlockSize;
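The two constructors above let the terms-dictionary block sizing be tuned; the no-arg form delegates to the BlockTreeTermsWriter defaults (DEFAULT_MIN_BLOCK_SIZE = 25, DEFAULT_MAX_BLOCK_SIZE = 48, shown earlier in this commit). A hedged sketch of both:

// default block sizing (25 / 48):
PostingsFormat defaults = new Lucene40PostingsFormat();

// custom sizing: larger blocks mean a smaller terms index but more scanning per block
PostingsFormat custom = new Lucene40PostingsFormat(50, 100);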
@ -59,6 +59,7 @@ public class Lucene40PostingsReader extends PostingsReaderBase {
|
|||
|
||||
// private String segment;
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext ioContext, String segmentSuffix) throws IOException {
|
||||
boolean success = false;
|
||||
IndexInput freqIn = null;
|
||||
|
|
|
@ -92,10 +92,14 @@ public final class Lucene40PostingsWriter extends PostingsWriterBase {
|
|||
|
||||
// private String segment;
|
||||
|
||||
/** Creates a {@link Lucene40PostingsWriter}, with the
|
||||
* {@link #DEFAULT_SKIP_INTERVAL}. */
|
||||
public Lucene40PostingsWriter(SegmentWriteState state) throws IOException {
|
||||
this(state, DEFAULT_SKIP_INTERVAL);
|
||||
}
|
||||
|
||||
/** Creates a {@link Lucene40PostingsWriter}, with the
|
||||
* specified {@code skipInterval}. */
|
||||
public Lucene40PostingsWriter(SegmentWriteState state, int skipInterval) throws IOException {
|
||||
super();
|
||||
this.skipInterval = skipInterval;
|
||||
|
|
|
@ -22,6 +22,7 @@ import org.apache.lucene.codecs.SegmentInfoFormat;
|
|||
import org.apache.lucene.codecs.SegmentInfoReader;
|
||||
import org.apache.lucene.codecs.SegmentInfoWriter;
|
||||
import org.apache.lucene.index.IndexWriter; // javadocs
|
||||
import org.apache.lucene.index.SegmentInfo; // javadocs
|
||||
import org.apache.lucene.index.SegmentInfos; // javadocs
|
||||
import org.apache.lucene.store.DataOutput; // javadocs
|
||||
|
||||
|
@ -71,6 +72,10 @@ public class Lucene40SegmentInfoFormat extends SegmentInfoFormat {
|
|||
private final SegmentInfoReader reader = new Lucene40SegmentInfoReader();
|
||||
private final SegmentInfoWriter writer = new Lucene40SegmentInfoWriter();
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40SegmentInfoFormat() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentInfoReader getSegmentInfoReader() {
|
||||
return reader;
|
||||
|
@ -81,6 +86,7 @@ public class Lucene40SegmentInfoFormat extends SegmentInfoFormat {
|
|||
return writer;
|
||||
}
|
||||
|
||||
/** File extension used to store {@link SegmentInfo}. */
|
||||
public final static String SI_EXTENSION = "si";
|
||||
static final String CODEC_NAME = "Lucene40SegmentInfo";
|
||||
static final int VERSION_START = 0;
|
||||
|
|
|
@ -40,6 +40,10 @@ import org.apache.lucene.util.IOUtils;
|
|||
*/
|
||||
public class Lucene40SegmentInfoReader extends SegmentInfoReader {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40SegmentInfoReader() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentInfo read(Directory dir, String segment, IOContext context) throws IOException {
|
||||
final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
|
||||
|
|
|
@ -37,6 +37,10 @@ import org.apache.lucene.util.IOUtils;
|
|||
*/
|
||||
public class Lucene40SegmentInfoWriter extends SegmentInfoWriter {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40SegmentInfoWriter() {
|
||||
}
|
||||
|
||||
/** Save a single segment's info. */
|
||||
@Override
|
||||
public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
|
||||
|
|
|
@ -43,7 +43,7 @@ public class Lucene40SkipListReader extends MultiLevelSkipListReader {
|
|||
private int lastPayloadLength;
|
||||
private int lastOffsetLength;
|
||||
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40SkipListReader(IndexInput skipStream, int maxSkipLevels, int skipInterval) {
|
||||
super(skipStream, maxSkipLevels, skipInterval);
|
||||
freqPointer = new long[maxSkipLevels];
|
||||
|
@ -52,6 +52,7 @@ public class Lucene40SkipListReader extends MultiLevelSkipListReader {
|
|||
offsetLength = new int[maxSkipLevels];
|
||||
}
|
||||
|
||||
/** Per-term initialization. */
|
||||
public void init(long skipPointer, long freqBasePointer, long proxBasePointer, int df, boolean storesPayloads, boolean storesOffsets) {
|
||||
super.init(skipPointer, df);
|
||||
this.currentFieldStoresPayloads = storesPayloads;
|
||||
|
|
|
@ -49,6 +49,7 @@ public class Lucene40SkipListWriter extends MultiLevelSkipListWriter {
|
|||
private long curFreqPointer;
|
||||
private long curProxPointer;
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40SkipListWriter(int skipInterval, int numberOfSkipLevels, int docCount, IndexOutput freqOutput, IndexOutput proxOutput) {
|
||||
super(skipInterval, numberOfSkipLevels, docCount);
|
||||
this.freqOutput = freqOutput;
|
||||
|
|
|
@ -81,6 +81,10 @@ import org.apache.lucene.store.IOContext;
|
|||
* @lucene.experimental */
|
||||
public class Lucene40StoredFieldsFormat extends StoredFieldsFormat {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40StoredFieldsFormat() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si,
|
||||
FieldInfos fn, IOContext context) throws IOException {
|
||||
|
|
|
@ -64,7 +64,7 @@ public final class Lucene40StoredFieldsReader extends StoredFieldsReader impleme
|
|||
return new Lucene40StoredFieldsReader(fieldInfos, numTotalDocs, size, fieldsStream.clone(), indexStream.clone());
|
||||
}
|
||||
|
||||
// Used only by clone
|
||||
/** Used only by clone. */
|
||||
private Lucene40StoredFieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, IndexInput fieldsStream, IndexInput indexStream) {
|
||||
this.fieldInfos = fieldInfos;
|
||||
this.numTotalDocs = numTotalDocs;
|
||||
|
@ -73,6 +73,7 @@ public final class Lucene40StoredFieldsReader extends StoredFieldsReader impleme
|
|||
this.indexStream = indexStream;
|
||||
}
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context) throws IOException {
|
||||
final String segment = si.name;
|
||||
boolean success = false;
|
||||
|
@ -128,6 +129,7 @@ public final class Lucene40StoredFieldsReader extends StoredFieldsReader impleme
|
|||
}
|
||||
}
|
||||
|
||||
/** Returns number of documents. */
|
||||
public final int size() {
|
||||
return size;
|
||||
}
|
||||
|
@ -136,6 +138,7 @@ public final class Lucene40StoredFieldsReader extends StoredFieldsReader impleme
|
|||
indexStream.seek(HEADER_LENGTH_IDX + docID * 8L);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void visitDocument(int n, StoredFieldVisitor visitor) throws IOException {
|
||||
seekIndex(n);
|
||||
fieldsStream.seek(indexStream.readLong());
|
||||
|
|
|
@ -16,6 +16,7 @@ package org.apache.lucene.codecs.lucene40;
|
|||
* the License.
|
||||
*/
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
|
@ -85,6 +86,7 @@ public final class Lucene40StoredFieldsWriter extends StoredFieldsWriter {
|
|||
private IndexOutput fieldsStream;
|
||||
private IndexOutput indexStream;
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40StoredFieldsWriter(Directory directory, String segment, IOContext context) throws IOException {
|
||||
assert directory != null;
|
||||
this.directory = directory;
|
||||
|
|
|
@ -115,6 +115,10 @@ import org.apache.lucene.store.IOContext;
|
|||
*/
|
||||
public class Lucene40TermVectorsFormat extends TermVectorsFormat {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40TermVectorsFormat() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public TermVectorsReader vectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context) throws IOException {
|
||||
return new Lucene40TermVectorsReader(directory, segmentInfo, fieldInfos, context);
|
||||
|
|
|
@ -17,6 +17,7 @@ package org.apache.lucene.codecs.lucene40;
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Comparator;
|
||||
|
@ -50,7 +51,7 @@ import org.apache.lucene.util.IOUtils;
|
|||
*
|
||||
* @see Lucene40TermVectorsFormat
|
||||
*/
|
||||
public class Lucene40TermVectorsReader extends TermVectorsReader {
|
||||
public class Lucene40TermVectorsReader extends TermVectorsReader implements Closeable {
|
||||
|
||||
static final byte STORE_POSITIONS_WITH_TERMVECTOR = 0x1;
|
||||
|
||||
|
@ -89,7 +90,7 @@ public class Lucene40TermVectorsReader extends TermVectorsReader {
|
|||
private int numTotalDocs;
|
||||
|
||||
|
||||
// used by clone
|
||||
/** Used by clone. */
|
||||
Lucene40TermVectorsReader(FieldInfos fieldInfos, IndexInput tvx, IndexInput tvd, IndexInput tvf, int size, int numTotalDocs) {
|
||||
this.fieldInfos = fieldInfos;
|
||||
this.tvx = tvx;
|
||||
|
@ -99,6 +100,7 @@ public class Lucene40TermVectorsReader extends TermVectorsReader {
|
|||
this.numTotalDocs = numTotalDocs;
|
||||
}
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40TermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldInfos, IOContext context)
|
||||
throws IOException {
|
||||
final String segment = si.name;
|
||||
|
@ -202,6 +204,7 @@ public class Lucene40TermVectorsReader extends TermVectorsReader {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
IOUtils.close(tvx, tvd, tvf);
|
||||
}
|
||||
|
|
|
@ -64,8 +64,7 @@ public final class Lucene40TermVectorsWriter extends TermVectorsWriter {
|
|||
private final String segment;
|
||||
private IndexOutput tvx = null, tvd = null, tvf = null;
|
||||
|
||||
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene40TermVectorsWriter(Directory directory, String segment, IOContext context) throws IOException {
|
||||
this.directory = directory;
|
||||
this.segment = segment;
|
||||
|
|
|
@ -39,9 +39,13 @@ import org.apache.lucene.util.packed.PackedInts;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public abstract class DocValuesWriterBase extends PerDocConsumer {
|
||||
/** Segment name to use when writing files. */
|
||||
protected final String segmentName;
|
||||
private final Counter bytesUsed;
|
||||
|
||||
/** {@link IOContext} to use when writing files. */
|
||||
protected final IOContext context;
|
||||
|
||||
private final float acceptableOverheadRatio;
|
||||
|
||||
/**
|
||||
|
@ -55,6 +59,8 @@ public abstract class DocValuesWriterBase extends PerDocConsumer {
|
|||
public static final String DATA_EXTENSION = "dat";
|
||||
|
||||
/**
|
||||
* Creates {@code DocValuesWriterBase}, using {@link
|
||||
* PackedInts#FAST}.
|
||||
* @param state The state to initiate a {@link PerDocConsumer} instance
|
||||
*/
|
||||
protected DocValuesWriterBase(PerDocWriteState state) {
|
||||
|
@ -62,6 +68,7 @@ public abstract class DocValuesWriterBase extends PerDocConsumer {
|
|||
}
|
||||
|
||||
/**
|
||||
* Creates {@code DocValuesWriterBase}.
|
||||
* @param state The state to initiate a {@link PerDocConsumer} instance
|
||||
* @param acceptableOverheadRatio
|
||||
* how to trade space for speed. This option is only applicable for
|
||||
|
@ -76,6 +83,8 @@ public abstract class DocValuesWriterBase extends PerDocConsumer {
|
|||
this.acceptableOverheadRatio = acceptableOverheadRatio;
|
||||
}
|
||||
|
||||
/** Returns the {@link Directory} that files should be
|
||||
* written to. */
|
||||
protected abstract Directory getDirectory() throws IOException;
|
||||
|
||||
@Override
|
||||
|
@ -90,6 +99,8 @@ public abstract class DocValuesWriterBase extends PerDocConsumer {
|
|||
}
|
||||
|
||||
|
||||
/** Returns the comparator used to sort {@link BytesRef}
|
||||
* values. */
|
||||
public Comparator<BytesRef> getComparator() throws IOException {
|
||||
return BytesRef.getUTF8SortedAsUnicodeComparator();
|
||||
}
|
||||
|
|
|
@ -43,15 +43,28 @@ import org.apache.lucene.util.IOUtils;
|
|||
*/
|
||||
public class Floats {
|
||||
|
||||
/** Codec name, written in the header. */
|
||||
protected static final String CODEC_NAME = "Floats";
|
||||
|
||||
/** Initial version. */
|
||||
protected static final int VERSION_START = 0;
|
||||
|
||||
/** Current version. */
|
||||
protected static final int VERSION_CURRENT = VERSION_START;
|
||||
|
||||
/** Sole constructor. */
|
||||
private Floats() {
|
||||
}
|
||||
|
||||
/** Creates and returns a {@link DocValuesConsumer} to
|
||||
* write float values. */
|
||||
public static DocValuesConsumer getWriter(Directory dir, String id, Counter bytesUsed,
|
||||
IOContext context, Type type) {
|
||||
return new FloatsWriter(dir, id, bytesUsed, context, type);
|
||||
}
|
||||
|
||||
/** Creates and returns a {@link DocValues} to
|
||||
* read previously written float values. */
|
||||
public static DocValues getValues(Directory dir, String id, int maxDoc, IOContext context, Type type)
|
||||
throws IOException {
|
||||
return new FloatsReader(dir, id, maxDoc, context, type);
|
||||
|
|
|
@ -39,19 +39,29 @@ import org.apache.lucene.util.IOUtils;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
public final class Ints {
|
||||
/** Codec name, written in the header. */
|
||||
protected static final String CODEC_NAME = "Ints";
|
||||
|
||||
/** Initial version. */
|
||||
protected static final int VERSION_START = 0;
|
||||
|
||||
/** Current version. */
|
||||
protected static final int VERSION_CURRENT = VERSION_START;
|
||||
|
||||
/** Sole constructor. */
|
||||
private Ints() {
|
||||
}
|
||||
|
||||
/** Creates and returns a {@link DocValuesConsumer} to
|
||||
* write int values. */
|
||||
public static DocValuesConsumer getWriter(Directory dir, String id, Counter bytesUsed,
|
||||
Type type, IOContext context) {
|
||||
return type == Type.VAR_INTS ? new PackedIntValues.PackedIntsWriter(dir, id,
|
||||
bytesUsed, context) : new IntsWriter(dir, id, bytesUsed, context, type);
|
||||
}
|
||||
|
||||
/** Creates and returns a {@link DocValues} to
|
||||
* read previously written int values. */
|
||||
public static DocValues getValues(Directory dir, String id, int numDocs,
|
||||
Type type, IOContext context) throws IOException {
|
||||
return type == Type.VAR_INTS ? new PackedIntValues.PackedIntsReader(dir, id,
|
||||
|
|
|
@ -53,11 +53,19 @@ import org.apache.lucene.util.IOUtils;
|
|||
*/
|
||||
|
||||
public abstract class PerFieldPostingsFormat extends PostingsFormat {
|
||||
/** Name of this {@link PostingsFormat}. */
|
||||
public static final String PER_FIELD_NAME = "PerField40";
|
||||
|
||||
/** {@link FieldInfo} attribute name used to store the
|
||||
* format name for each field. */
|
||||
public static final String PER_FIELD_FORMAT_KEY = PerFieldPostingsFormat.class.getSimpleName() + ".format";
|
||||
|
||||
/** {@link FieldInfo} attribute name used to store the
|
||||
* segment suffix name for each field. */
|
||||
public static final String PER_FIELD_SUFFIX_KEY = PerFieldPostingsFormat.class.getSimpleName() + ".suffix";
|
||||
|
||||
|
||||
/** Sole constructor. */
|
||||
public PerFieldPostingsFormat() {
|
||||
super(PER_FIELD_NAME);
|
||||
}
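The two attribute keys above are derived from the class's simple name, so the values written into FieldInfo attributes are predictable. What the constants evaluate to, derived directly from the expressions shown:

// PerFieldPostingsFormat.class.getSimpleName() is "PerFieldPostingsFormat", so:
String formatKey = PerFieldPostingsFormat.PER_FIELD_FORMAT_KEY;  // "PerFieldPostingsFormat.format"
String suffixKey = PerFieldPostingsFormat.PER_FIELD_SUFFIX_KEY;  // "PerFieldPostingsFormat.suffix"
String name = PerFieldPostingsFormat.PER_FIELD_NAME;             // "PerField40"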