mirror of https://github.com/apache/lucene.git
fix all malformed javadocs (@code/deprecated/param/returns/throws with no args)
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1389133 13f79535-47bb-0310-9956-ffa450edef68
commit f025322f81
parent 2556b0de79
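The changes below first tighten the project's Eclipse compiler preference (missingJavadocTagDescription is raised from return_tag to all_standard_tags), so that any standard Javadoc tag left without a description is reported, and then repair the offending comments across the tree: bare @param/@return/@throws tags either gain a short description or are dropped, and empty {@code} markers are fixed. As a rough sketch of the target style (the class and method below are hypothetical and not part of this commit), a cleaned-up doc comment looks like this:

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;

/** Illustrative sketch only; this class is not part of the commit. */
public class JavadocFixExample {

  // Before the fix, a doc comment would carry bare tags with no description,
  // e.g. "* @param source" or "* @throws IOException" on their own, which
  // Eclipse reports once missingJavadocTagDescription=all_standard_tags is set.

  /**
   * Reads all characters from the provided Reader.
   *
   * @param source Reader to read characters from.
   * @return the characters read, as a String.
   * @throws IOException If there is a low-level I/O error.
   */
  public static String readAll(Reader source) throws IOException {
    StringBuilder sb = new StringBuilder();
    int c;
    while ((c = source.read()) != -1) {
      sb.append((char) c);
    }
    return sb.toString();
  }

  public static void main(String[] args) throws IOException {
    System.out.println(readAll(new StringReader("javadoc example")));
  }
}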
@@ -1,4 +1,4 @@
-#Wed Sep 19 20:37:34 EDT 2012
+#Sun Sep 23 13:02:27 EDT 2012
 eclipse.preferences.version=1
 org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
 org.eclipse.jdt.core.compiler.compliance=1.6
@@ -13,7 +13,7 @@ org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private
 org.eclipse.jdt.core.compiler.problem.missingJavadocComments=ignore
 org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled
 org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility=public
-org.eclipse.jdt.core.compiler.problem.missingJavadocTagDescription=return_tag
+org.eclipse.jdt.core.compiler.problem.missingJavadocTagDescription=all_standard_tags
 org.eclipse.jdt.core.compiler.problem.missingJavadocTags=ignore
 org.eclipse.jdt.core.compiler.problem.missingJavadocTagsMethodTypeParameters=disabled
 org.eclipse.jdt.core.compiler.problem.missingJavadocTagsOverriding=disabled
@@ -30792,7 +30792,8 @@ public final class HTMLStripCharFilter extends BaseCharFilter {
 private TextSegment entitySegment = new TextSegment(2);
 
 /**
-* @param source
+* Creates a new HTMLStripCharFilter over the provided Reader.
+* @param source Reader to strip html tags from.
 */
 public HTMLStripCharFilter(Reader source) {
 super(source);
@@ -30800,7 +30801,9 @@ public final class HTMLStripCharFilter extends BaseCharFilter {
 }
 
 /**
-* @param source
+* Creates a new HTMLStripCharFilter over the provided Reader
+* with the specified start and end tags.
+* @param source Reader to strip html tags from.
 * @param escapedTags Tags in this set (both start and end tags)
 * will not be filtered out.
 */
@@ -171,7 +171,8 @@ InlineElment = ( [aAbBiIqQsSuU] |
 private TextSegment entitySegment = new TextSegment(2);
 
 /**
-* @param source
+* Creates a new HTMLStripCharFilter over the provided Reader.
+* @param source Reader to strip html tags from.
 */
 public HTMLStripCharFilter(Reader source) {
 super(source);
@@ -179,7 +180,9 @@ InlineElment = ( [aAbBiIqQsSuU] |
 }
 
 /**
-* @param source
+* Creates a new HTMLStripCharFilter over the provided Reader
+* with the specified start and end tags.
+* @param source Reader to strip html tags from.
 * @param escapedTags Tags in this set (both start and end tags)
 * will not be filtered out.
 */
@@ -132,7 +132,7 @@ public class HyphenationCompoundWordTokenFilter extends
 *
 * @param hyphenationFilename the filename of the XML grammar to load
 * @return An object representing the hyphenation patterns
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public static HyphenationTree getHyphenationTree(String hyphenationFilename)
 throws IOException {
@@ -144,7 +144,7 @@ public class HyphenationCompoundWordTokenFilter extends
 *
 * @param hyphenationFile the file of the XML grammar to load
 * @return An object representing the hyphenation patterns
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public static HyphenationTree getHyphenationTree(File hyphenationFile)
 throws IOException {
@@ -156,7 +156,7 @@ public class HyphenationCompoundWordTokenFilter extends
 *
 * @param hyphenationSource the InputSource pointing to the XML grammar
 * @return An object representing the hyphenation patterns
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public static HyphenationTree getHyphenationTree(InputSource hyphenationSource)
 throws IOException {
@@ -44,8 +44,8 @@ public final class KStemFilter extends TokenFilter {
 }
 
 /** Returns the next, stemmed, input Token.
-* @return The stemed form of a token.
-* @throws IOException
+* @return The stemmed form of a token.
+* @throws IOException If there is a low-level I/O error.
 */
 @Override
 public boolean incrementToken() throws IOException {
@@ -351,7 +351,7 @@ public final class WordDelimiterFilter extends TokenFilter {
 * Flushes the given WordDelimiterConcatenation by either writing its concat and then clearing, or just clearing.
 *
 * @param concatenation WordDelimiterConcatenation that will be flushed
-* @return {@code true} if the concatenation was written before it was cleared, {@code} false otherwise
+* @return {@code true} if the concatenation was written before it was cleared, {@code false} otherwise
 */
 private boolean flushConcatenation(WordDelimiterConcatenation concatenation) {
 lastConcatCount = concatenation.subwordCount;
@@ -493,7 +493,7 @@ public final class WordDelimiterFilter extends TokenFilter {
 * Determines whether the given flag is set
 *
 * @param flag Flag to see if set
-* @return {@code} true if flag is set
+* @return {@code true} if flag is set
 */
 private boolean has(int flag) {
 return (flags & flag) != 0;
@@ -33,9 +33,6 @@ public interface PayloadEncoder {
 
 /**
 * Convert a char array to a {@link BytesRef}
-* @param buffer
-* @param offset
-* @param length
 * @return encoded {@link BytesRef}
 */
 BytesRef encode(char [] buffer, int offset, int length);
@@ -44,7 +44,6 @@ public class PayloadHelper {
 }
 
 /**
-* @param bytes
 * @see #decodeFloat(byte[], int)
 * @see #encodeFloat(float)
 * @return the decoded float
@@ -184,7 +184,7 @@ public class WordlistLoader {
 * (i.e. two tab separated words)
 *
 * @return stem dictionary that overrules the stemming algorithm
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public static CharArrayMap<String> getStemDict(Reader reader, CharArrayMap<String> result) throws IOException {
 BufferedReader br = null;
@@ -210,7 +210,7 @@ public class WordlistLoader {
 * </p>
 *
 * @return a list of non-blank non-comment lines with whitespace trimmed
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public static List<String> getLines(InputStream stream, Charset charset) throws IOException{
 BufferedReader input = null;
@@ -140,7 +140,6 @@ public final class WikipediaTokenizer extends Tokenizer {
 *
 * @param input The input
 * @param tokenOutput One of {@link #TOKENS_ONLY}, {@link #UNTOKENIZED_ONLY}, {@link #BOTH}
-* @param untokenizedTypes
 */
 public WikipediaTokenizer(Reader input, int tokenOutput, Set<String> untokenizedTypes) {
 super(input);
@@ -154,7 +153,6 @@ public final class WikipediaTokenizer extends Tokenizer {
 *
 * @param input The input
 * @param tokenOutput One of {@link #TOKENS_ONLY}, {@link #UNTOKENIZED_ONLY}, {@link #BOTH}
-* @param untokenizedTypes
 */
 public WikipediaTokenizer(AttributeFactory factory, Reader input, int tokenOutput, Set<String> untokenizedTypes) {
 super(factory, input);
@@ -168,7 +166,6 @@ public final class WikipediaTokenizer extends Tokenizer {
 *
 * @param input The input
 * @param tokenOutput One of {@link #TOKENS_ONLY}, {@link #UNTOKENIZED_ONLY}, {@link #BOTH}
-* @param untokenizedTypes
 */
 public WikipediaTokenizer(AttributeSource source, Reader input, int tokenOutput, Set<String> untokenizedTypes) {
 super(source, input);
@@ -171,8 +171,6 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
 
 /**
 * Not an explicit test, just useful to print out some info on performance
-*
-* @throws Exception
 */
 public void performance() throws Exception {
 int[] tokCount = {100, 500, 1000, 2000, 5000, 10000};
@@ -139,7 +139,7 @@ public final class ICUTokenizer extends Tokenizer {
 * Refill the buffer, accumulating the offset and setting usableLength to the
 * last unambiguous break position
 *
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 private void refill() throws IOException {
 offset += usableLength;
@@ -225,7 +225,7 @@ public class JapaneseIterationMarkCharFilter extends CharFilter {
 *
 * @param c iteration mark character to normalize
 * @return normalized iteration mark
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 private char normalizeIterationMark(char c) throws IOException {
 
@@ -252,7 +252,7 @@ public class JapaneseIterationMarkCharFilter extends CharFilter {
 * Finds the number of subsequent next iteration marks
 *
 * @return number of iteration marks starting at the current buffer position
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 private int nextIterationMarkSpanSize() throws IOException {
 int spanSize = 0;
@@ -272,7 +272,7 @@ public class JapaneseIterationMarkCharFilter extends CharFilter {
 * @param position buffer position (should not exceed bufferPosition)
 * @param spanSize iteration mark span size
 * @return source character
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 private char sourceCharacter(int position, int spanSize) throws IOException {
 return (char) buffer.get(position - spanSize);
@@ -27,22 +27,19 @@ public interface Dictionary {
 
 /**
 * Get left id of specified word
-* @param wordId
 * @return left id
 */
 public int getLeftId(int wordId);
 
 /**
 * Get right id of specified word
-* @param wordId
-* @return left id
+* @return right id
 */
 public int getRightId(int wordId);
 
 /**
 * Get word cost of specified word
-* @param wordId
-* @return left id
+* @return word's cost
 */
 public int getWordCost(int wordId);
 
@@ -172,7 +172,6 @@ public final class UserDictionary implements Dictionary {
 
 /**
 * Convert Map of index and wordIdAndLength to array of {wordId, index, length}
-* @param input
 * @return array of {wordId, index, length}
 */
 private int[][] toIndexArray(Map<Integer, int[]> input) {
@@ -37,7 +37,7 @@ public final class CSVUtil {
 
 /**
 * Parse CSV line
-* @param line
+* @param line line containing csv-encoded data
 * @return Array of values
 */
 public static String[] parse(String line) {
@@ -96,7 +96,6 @@ public final class CSVUtil {
 
 /**
 * Quote and escape input value for CSV
-* @param original
 */
 public static String quoteEscape(String original) {
 String result = original;
@@ -61,8 +61,8 @@ public final class BeiderMorseFilter extends TokenFilter {
 * Calls
 * {@link #BeiderMorseFilter(TokenStream, PhoneticEngine, org.apache.commons.codec.language.bm.Languages.LanguageSet)}
 *
-* @param input
-* @param engine
+* @param input TokenStream to filter
+* @param engine configured PhoneticEngine with BM settings.
 */
 public BeiderMorseFilter(TokenStream input, PhoneticEngine engine) {
 this(input, engine, null);
|
||||||
* Load the datafile into this BigramDictionary
|
* Load the datafile into this BigramDictionary
|
||||||
*
|
*
|
||||||
* @param dctFilePath path to the Bigramdictionary (bigramdict.dct)
|
* @param dctFilePath path to the Bigramdictionary (bigramdict.dct)
|
||||||
* @throws FileNotFoundException
|
* @throws IOException If there is a low-level I/O error
|
||||||
* @throws IOException
|
|
||||||
* @throws UnsupportedEncodingException
|
|
||||||
*/
|
*/
|
||||||
public void loadFromFile(String dctFilePath) throws IOException {
|
public void loadFromFile(String dctFilePath) throws IOException {
|
||||||
|
|
||||||
|
|
|
@@ -133,8 +133,7 @@ class WordDictionary extends AbstractDictionary {
 /**
 * Load coredict.mem internally from the jar file.
 *
-* @throws ClassNotFoundException
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public void load() throws IOException, ClassNotFoundException {
 InputStream input = this.getClass().getResourceAsStream("coredict.mem");
@@ -181,9 +180,7 @@ class WordDictionary extends AbstractDictionary {
 *
 * @param dctFilePath path to word dictionary (coredict.dct)
 * @return number of words read
-* @throws FileNotFoundException
-* @throws IOException
-* @throws UnsupportedEncodingException
+* @throws IOException If there is a low-level I/O error.
 */
 private int loadMainDataFromFile(String dctFilePath) throws IOException {
 int i, cnt, length, total = 0;
@@ -54,8 +54,7 @@ public abstract class BaseUIMATokenizer extends Tokenizer {
 * <p/>
 * {@link #cas} will be filled with extracted metadata (UIMA annotations, feature structures)
 *
-* @throws AnalysisEngineProcessException
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 protected void analyzeInput() throws AnalysisEngineProcessException, IOException {
 cas.reset();
@@ -66,7 +65,7 @@ public abstract class BaseUIMATokenizer extends Tokenizer {
 /**
 * initialize the FSIterator which is used to build tokens at each incrementToken() method call
 *
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 protected abstract void initializeIterator() throws IOException;
 
@@ -27,9 +27,7 @@ import org.apache.uima.resource.ResourceInitializationException;
 public interface AEProvider {
 
 /**
-*
-* @return AnalysisEngine
-* @throws ResourceInitializationException
+* Returns the AnalysisEngine
 */
 public AnalysisEngine getAE() throws ResourceInitializationException;
 
@@ -36,7 +36,7 @@ public interface HTMLParser {
 * @param reader reader of html text to parse.
 * @param trecSrc the {@link TrecContentSource} used to parse dates.
 * @return Parsed doc data.
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public DocData parse(DocData docData, String name, Date date, Reader reader, TrecContentSource trecSrc) throws IOException;
 
@@ -37,8 +37,7 @@ public interface QueryMaker {
 /** Create the next query */
 public Query makeQuery () throws Exception;
 
-/** Set the properties
-* @throws Exception */
+/** Set the properties */
 public void setConfig (Config config) throws Exception;
 
 /** Reset inputs so that the test run would behave, input wise, as if it just started. */
@@ -130,8 +130,8 @@ public class TrecContentSource extends ContentSource {
 * @param lineStart line start to look for, must not be null.
 * @param collectMatchLine whether to collect the matching line into <code>buffer</code>.
 * @param collectAll whether to collect all lines into <code>buffer</code>.
-* @throws IOException
-* @throws NoMoreDataException
+* @throws IOException If there is a low-level I/O error.
+* @throws NoMoreDataException If the source is exhausted.
 */
 private void read(StringBuilder buf, String lineStart,
 boolean collectMatchLine, boolean collectAll) throws IOException, NoMoreDataException {
@@ -32,11 +32,6 @@ import org.apache.lucene.benchmark.byTask.utils.Config;
 */
 public class Sample {
 
-/**
-* @param args
-* @throws Exception
-* @throws IOException
-*/
 public static void main(String[] args) throws Exception {
 Properties p = initProps();
 Config conf = new Config(p);
@@ -109,7 +109,6 @@ public class TaskSequence extends PerfTask {
 
 /**
 * @param repetitions The repetitions to set.
-* @throws Exception
 */
 public void setRepetitions(int repetitions) throws Exception {
 fixedTime = false;
@@ -295,7 +295,6 @@ public class Algorithm {
 
 /**
 * Execute this algorithm
-* @throws Exception
 */
 public void execute() throws Exception {
 try {
@@ -58,7 +58,7 @@ public class Config {
 * Read both algorithm and config properties.
 *
 * @param algReader from where to read algorithm and config properties.
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public Config(Reader algReader) throws IOException {
 // read alg file to array of lines
@@ -163,7 +163,6 @@ public class Config {
 *
 * @param name name of property.
 * @param value either single or multiple property value (multiple values are separated by ":")
-* @throws Exception
 */
 public void set(String name, String value) throws Exception {
 if (valByRound.get(name) != null) {
@@ -30,7 +30,7 @@ public class FileUtils {
 *
 * @param dir file or directory
 * @return true on success, false if no or part of files have been deleted
-* @throws java.io.IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public static boolean fullyDelete(File dir) throws IOException {
 if (dir == null || !dir.exists()) return false;
@@ -46,7 +46,7 @@ public class TrecJudge implements Judge {
 * 19 0 doc7295 0
 * </pre>
 * @param reader where judgments are read from.
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public TrecJudge (BufferedReader reader) throws IOException {
 judgements = new HashMap<String,QRelJudgement>();
@@ -74,8 +74,6 @@ public class ExtractReuters {
 
 /**
 * Override if you wish to change what is extracted
-*
-* @param sgmFile
 */
 protected void extractFile(File sgmFile) {
 try {
@@ -32,7 +32,7 @@ public interface Classifier {
 * Assign a class to the given text String
 * @param text a String containing text to be classified
 * @return a String representing a class
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public String assignClass(String text) throws IOException;
 
@@ -42,7 +42,7 @@ public interface Classifier {
 * @param textFieldName the name of the field used to compare documents
 * @param classFieldName the name of the field containing the class assigned to documents
 * @param analyzer the analyzer used to tokenize / filter the unseen text
-* @throws IOException
+* @throws IOException If there is a low-level I/O error.
 */
 public void train(AtomicReader atomicReader, String textFieldName, String classFieldName, Analyzer analyzer)
 throws IOException;
@@ -153,7 +153,7 @@ final class ForUtil {
 * @param data the data to write
 * @param encoded a buffer to use to encode data
 * @param out the destination output
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 void writeBlock(int[] data, byte[] encoded, IndexOutput out) throws IOException {
 if (isAllEqual(data)) {
@@ -182,7 +182,7 @@ final class ForUtil {
 * @param in the input to use to read data
 * @param encoded a buffer that can be used to store encoded data
 * @param decoded where to write decoded data
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 void readBlock(IndexInput in, byte[] encoded, int[] decoded) throws IOException {
 final int numBits = in.readVInt();
@@ -208,7 +208,7 @@ final class ForUtil {
 * Skip the next block of data.
 *
 * @param in the input where to read data
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 void skipBlock(IndexInput in) throws IOException {
 final int numBits = in.readVInt();
@@ -111,7 +111,6 @@ public class FuzzySet {
 /**
 * Use this method to choose a set size where accuracy (low content saturation) is more important
 * than deciding how much memory to throw at the problem.
-* @param maxNumberOfValuesExpected
 * @param desiredSaturation A number between 0 and 1 expressing the % of bits set once all values have been recorded
 * @return The size of the set nearest to the required size
 */
@@ -155,7 +154,6 @@ public class FuzzySet {
 /**
 * The main method required for a Bloom filter which, given a value determines set membership.
 * Unlike a conventional set, the fuzzy set returns NO or MAYBE rather than true or false.
-* @param value
 * @return NO or MAYBE
 */
 public ContainsResult contains(BytesRef value) {
@@ -182,7 +180,7 @@ public class FuzzySet {
 * returned by {@link FixedBitSet#getBits}</li>
 * </ul>
 * @param out Data output stream
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 public void serialize(DataOutput out) throws IOException
 {
@@ -229,7 +227,7 @@ public class FuzzySet {
 * Records a value in the set. The referenced bytes are hashed and then modulo n'd where n is the
 * chosen size of the internal bitset.
 * @param value the key value to be hashed
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 public void addValue(BytesRef value) throws IOException {
 int hash = hashFunction.hash(value);
@@ -523,7 +523,7 @@ public class Token extends CharTermAttributeImpl
 
 /**
 * Copy the prototype token's fields into this one. Note: Payloads are shared.
-* @param prototype
+* @param prototype source Token to copy fields from
 */
 public void reinit(Token prototype) {
 copyBuffer(prototype.buffer(), 0, prototype.length());
@@ -160,7 +160,7 @@ public abstract class TokenStream extends AttributeSource implements Closeable {
 * differ from the offset of the last token eg in case one or more whitespaces
 * followed after the last token, but a WhitespaceTokenizer was used.
 *
-* @throws IOException
+* @throws IOException If an I/O error occurs
 */
 public void end() throws IOException {
 // do nothing by default
@@ -84,7 +84,7 @@ public abstract class DocValuesConsumer {
 * the total number of documents in this {@link DocValuesConsumer}.
 * Must be greater than or equal the last given docID to
 * {@link #add(int, StorableField)}.
-* @throws IOException
+* @throws IOException If an I/O error occurs
 */
 public abstract void finish(int docCount) throws IOException;
 
@@ -121,7 +121,7 @@ public abstract class MultiLevelSkipListWriter {
 * the max level is skip data is to be written to.
 *
 * @param df the current document frequency
-* @throws IOException
+* @throws IOException If an I/O error occurs
 */
 public void bufferSkip(int df) throws IOException {
 
@@ -45,7 +45,7 @@ public abstract class PerDocProducer implements Closeable {
 * the field name
 * @return the {@link DocValues} for this field or <code>null</code> if not
 * applicable.
-* @throws IOException
+* @throws IOException If an I/O error occurs
 */
 public abstract DocValues docValues(String field) throws IOException;
 
@@ -40,7 +40,7 @@ public abstract class SegmentInfoReader {
 * @param directory directory to read from
 * @param segmentName name of the segment to read
 * @return infos instance to be populated with data
-* @throws IOException
+* @throws IOException If an I/O error occurs
 */
 public abstract SegmentInfo read(Directory directory, String segmentName, IOContext context) throws IOException;
 }
@@ -37,7 +37,7 @@ public abstract class SegmentInfoWriter {
 
 /**
 * Write {@link SegmentInfo} data.
-* @throws IOException
+* @throws IOException If an I/O error occurs
 */
 public abstract void write(Directory dir, SegmentInfo info, FieldInfos fis, IOContext ioContext) throws IOException;
 }
@@ -120,7 +120,7 @@ public final class Lucene40StoredFieldsReader extends StoredFieldsReader impleme
 * Closes the underlying {@link org.apache.lucene.store.IndexInput} streams.
 * This means that the Fields values will not be accessible.
 *
-* @throws IOException
+* @throws IOException If an I/O error occurs
 */
 public final void close() throws IOException {
 if (!closed) {
@@ -99,7 +99,7 @@ public abstract class DirectoryReader extends BaseCompositeReader<AtomicReader>
 * can tolerate deleted documents being returned you might
 * gain some performance by passing false.
 * @return The new IndexReader
-* @throws CorruptIndexException
+* @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 *
 * @see #openIfChanged(DirectoryReader,IndexWriter,boolean)
@@ -241,7 +241,7 @@ public abstract class DirectoryReader extends BaseCompositeReader<AtomicReader>
 * can tolerate deleted documents being returned you might
 * gain some performance by passing false.
 *
-* @throws IOException
+* @throws IOException if there is a low-level IO error
 *
 * @lucene.experimental
 */
@@ -146,10 +146,13 @@ public class FieldInfos implements Iterable<FieldInfo> {
 
 /**
 * Return the fieldinfo object referenced by the fieldNumber.
-* @param fieldNumber
+* @param fieldNumber field's number. if this is negative, this method
+* always returns null.
 * @return the FieldInfo object or null when the given fieldNumber
 * doesn't exist.
 */
+// TODO: fix this negative behavior, this was something related to Lucene3x?
+// if the field name is empty, i think it writes the fieldNumber as -1
 public FieldInfo fieldInfo(int fieldNumber) {
 return (fieldNumber >= 0) ? byNumber.get(fieldNumber) : null;
 }
@@ -150,7 +150,8 @@ public abstract class FilteredTermsEnum extends TermsEnum {
 }
 
 /** This enum does not support seeking!
-* @throws UnsupportedOperationException
+* @throws UnsupportedOperationException In general, subclasses do not
+* support seeking.
 */
 @Override
 public boolean seekExact(BytesRef term, boolean useCache) throws IOException {
@@ -158,7 +159,8 @@ public abstract class FilteredTermsEnum extends TermsEnum {
 }
 
 /** This enum does not support seeking!
-* @throws UnsupportedOperationException
+* @throws UnsupportedOperationException In general, subclasses do not
+* support seeking.
 */
 @Override
 public SeekStatus seekCeil(BytesRef term, boolean useCache) throws IOException {
@@ -166,7 +168,8 @@ public abstract class FilteredTermsEnum extends TermsEnum {
 }
 
 /** This enum does not support seeking!
-* @throws UnsupportedOperationException
+* @throws UnsupportedOperationException In general, subclasses do not
+* support seeking.
 */
 @Override
 public void seekExact(long ord) throws IOException {
@@ -189,7 +192,8 @@ public abstract class FilteredTermsEnum extends TermsEnum {
 }
 
 /** This enum does not support seeking!
-* @throws UnsupportedOperationException
+* @throws UnsupportedOperationException In general, subclasses do not
+* support seeking.
 */
 @Override
 public void seekExact(BytesRef term, TermState state) throws IOException {
@@ -320,7 +320,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
 * @return IndexReader that covers entire index plus all
 * changes made so far by this IndexWriter instance
 *
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 DirectoryReader getReader(boolean applyAllDeletes) throws IOException {
 ensureOpen();
@@ -473,7 +473,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
 * Commit live docs changes for the segment readers for
 * the provided infos.
 *
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 public synchronized void commit(SegmentInfos infos) throws IOException {
 for (SegmentInfoPerCommit info : infos) {
@@ -51,7 +51,7 @@ public final class ReaderManager extends ReferenceManager<DirectoryReader> {
 * performance by passing <code>false</code>. See
 * {@link DirectoryReader#openIfChanged(DirectoryReader, IndexWriter, boolean)}.
 *
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 public ReaderManager(IndexWriter writer, boolean applyAllDeletes) throws IOException {
 current = DirectoryReader.open(writer, applyAllDeletes);
@@ -61,7 +61,7 @@ public final class ReaderManager extends ReferenceManager<DirectoryReader> {
 * Creates and returns a new ReaderManager from the given {@link Directory}.
 * @param dir the directory to open the DirectoryReader on.
 *
-* @throws IOException
+* @throws IOException If there is a low-level I/O error
 */
 public ReaderManager(Directory dir) throws IOException {
 current = DirectoryReader.open(dir);
@@ -70,7 +70,6 @@ final class SegmentMerger {
 
 /**
 * Add an IndexReader to the collection of readers that are to be merged
-* @param reader
 */
 final void add(IndexReader reader) {
 for (final AtomicReaderContext ctx : reader.leaves()) {
@@ -253,7 +252,7 @@ final class SegmentMerger {
 
 /**
 * Merge the TermVectors from each of the segments into the new one.
-* @throws IOException
+* @throws IOException if there is a low-level IO error
 */
 private final int mergeVectors() throws IOException {
 final TermVectorsWriter termVectorsWriter = codec.termVectorsFormat().vectorsWriter(directory, mergeState.segmentInfo, context);
@@ -168,12 +168,6 @@ public final class SegmentReader extends AtomicReader {
 return core.termVectorsLocal.get();
 }
 
-/** Return a term frequency vector for the specified document and field. The
-* vector returned contains term numbers and frequencies for all terms in
-* the specified field of this document, if the field had storeTermVector
-* flag set. If the flag was not set, the method returns null.
-* @throws IOException
-*/
 @Override
 public Fields getTermVectors(int docID) throws IOException {
 TermVectorsReader termVectorsReader = getTermVectorsReader();
@@ -56,7 +56,7 @@ public final class Term implements Comparable<Term> {
 * This serves two purposes: 1) reuse of a Term with the same field.
 * 2) pattern for a query.
 *
-* @param fld
+* @param fld field's name
 */
 public Term(String fld) {
 this(fld, new BytesRef());
@@ -148,7 +148,7 @@ public abstract class FieldComparator<T> {
 * @return the comparator to use for this segment; most
 * comparators can just return "this" to reuse the same
 * comparator across segments
-* @throws IOException
+* @throws IOException if there is a low-level IO error
 */
 public abstract FieldComparator<T> setNextReader(AtomicReaderContext context) throws IOException;
 
@@ -159,7 +159,7 @@ public abstract class FieldValueHitQueue<T extends FieldValueHitQueue.Entry> ext
 * priority first); cannot be <code>null</code> or empty
 * @param size
 * The number of hits to retain. Must be greater than zero.
-* @throws IOException
+* @throws IOException if there is a low-level IO error
 */
 public static <T extends FieldValueHitQueue.Entry> FieldValueHitQueue<T> create(SortField[] fields, int size) throws IOException {
 
@@ -96,7 +96,7 @@ public class FuzzyTermsEnum extends TermsEnum {
 * @param minSimilarity Minimum required similarity for terms from the reader. Pass an integer value
 * representing edit distance. Passing a fraction is deprecated.
 * @param prefixLength Length of required common prefix. Default value is 0.
-* @throws IOException
+* @throws IOException if there is a low-level IO error
 */
 public FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term,
 final float minSimilarity, final int prefixLength, boolean transpositions) throws IOException {
@@ -220,7 +220,8 @@ public class IndexSearcher {
 * this method can be used for efficient 'deep-paging' across potentially
 * large result sets.
 *
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public TopDocs searchAfter(ScoreDoc after, Query query, int n) throws IOException {
 return search(createNormalizedWeight(query), after, n);
@@ -234,7 +235,8 @@ public class IndexSearcher {
 * this method can be used for efficient 'deep-paging' across potentially
 * large result sets.
 *
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n) throws IOException {
 return search(createNormalizedWeight(wrapFilter(query, filter)), after, n);
@@ -243,7 +245,8 @@ public class IndexSearcher {
 /** Finds the top <code>n</code>
 * hits for <code>query</code>.
 *
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public TopDocs search(Query query, int n)
 throws IOException {
@@ -254,7 +257,8 @@ public class IndexSearcher {
 /** Finds the top <code>n</code>
 * hits for <code>query</code>, applying <code>filter</code> if non-null.
 *
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public TopDocs search(Query query, Filter filter, int n)
 throws IOException {
@@ -269,7 +273,8 @@ public class IndexSearcher {
 * @param query to match documents
 * @param filter if non-null, used to permit documents to be collected.
 * @param results to receive hits
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public void search(Query query, Filter filter, Collector results)
 throws IOException {
@@ -277,11 +282,12 @@ public class IndexSearcher {
 }
 
 /** Lower-level search API.
 *
 * <p>{@link Collector#collect(int)} is called for every matching document.
 *
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public void search(Query query, Collector results)
 throws IOException {
 search(leafContexts, createNormalizedWeight(query), results);
@@ -296,7 +302,8 @@ public class IndexSearcher {
 * {@link IndexSearcher#search(Query,Filter,int,Sort,boolean,boolean)} to
 * control scoring.
 *
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public TopFieldDocs search(Query query, Filter filter, int n,
 Sort sort) throws IOException {
@@ -314,7 +321,8 @@ public class IndexSearcher {
 * <code>true</code> then the maximum score over all
 * collected hits will be computed.
 *
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public TopFieldDocs search(Query query, Filter filter, int n,
 Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException {
@@ -329,7 +337,8 @@ public class IndexSearcher {
 * this method can be used for efficient 'deep-paging' across potentially
 * large result sets.
 *
-* @throws BooleanQuery.TooManyClauses
+* @throws BooleanQuery.TooManyClauses If a query would exceed
+* {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
 public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException {
 if (after != null && !(after instanceof FieldDoc)) {
@@ -346,7 +355,7 @@ public class IndexSearcher {
  * @param n Return only the top n results
  * @param sort The {@link org.apache.lucene.search.Sort} object
  * @return The top docs, sorted according to the supplied {@link org.apache.lucene.search.Sort} instance
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public TopFieldDocs search(Query query, int n,
 Sort sort) throws IOException {
@@ -361,7 +370,8 @@ public class IndexSearcher {
  * this method can be used for efficient 'deep-paging' across potentially
  * large result sets.
  *
- * @throws BooleanQuery.TooManyClauses
+ * @throws BooleanQuery.TooManyClauses If a query would exceed
+ * {@link BooleanQuery#getMaxClauseCount()} clauses.
  */
 public TopDocs searchAfter(ScoreDoc after, Query query, int n, Sort sort) throws IOException {
 if (after != null && !(after instanceof FieldDoc)) {
@@ -385,7 +395,8 @@ public class IndexSearcher {
  * <code>true</code> then the maximum score over all
  * collected hits will be computed.
  *
- * @throws BooleanQuery.TooManyClauses
+ * @throws BooleanQuery.TooManyClauses If a query would exceed
+ * {@link BooleanQuery#getMaxClauseCount()} clauses.
  */
 public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort,
 boolean doDocScores, boolean doMaxScore) throws IOException {
@@ -403,7 +414,8 @@ public class IndexSearcher {
  *
  * <p>Applications should usually call {@link IndexSearcher#search(Query,int)} or
  * {@link IndexSearcher#search(Query,Filter,int)} instead.
- * @throws BooleanQuery.TooManyClauses
+ * @throws BooleanQuery.TooManyClauses If a query would exceed
+ * {@link BooleanQuery#getMaxClauseCount()} clauses.
  */
 protected TopDocs search(Weight weight, ScoreDoc after, int nDocs) throws IOException {
 if (executor == null) {
@@ -440,7 +452,8 @@ public class IndexSearcher {
  *
  * <p>Applications should usually call {@link IndexSearcher#search(Query,int)} or
  * {@link IndexSearcher#search(Query,Filter,int)} instead.
- * @throws BooleanQuery.TooManyClauses
+ * @throws BooleanQuery.TooManyClauses If a query would exceed
+ * {@link BooleanQuery#getMaxClauseCount()} clauses.
  */
 protected TopDocs search(List<AtomicReaderContext> leaves, Weight weight, ScoreDoc after, int nDocs) throws IOException {
 // single thread
@@ -463,7 +476,8 @@ public class IndexSearcher {
  * <p>Applications should usually call {@link
  * IndexSearcher#search(Query,Filter,int,Sort)} instead.
  *
- * @throws BooleanQuery.TooManyClauses
+ * @throws BooleanQuery.TooManyClauses If a query would exceed
+ * {@link BooleanQuery#getMaxClauseCount()} clauses.
  */
 protected TopFieldDocs search(Weight weight,
 final int nDocs, Sort sort,
@@ -559,7 +573,8 @@ public class IndexSearcher {
  * to match documents
  * @param collector
  * to receive hits
- * @throws BooleanQuery.TooManyClauses
+ * @throws BooleanQuery.TooManyClauses If a query would exceed
+ * {@link BooleanQuery#getMaxClauseCount()} clauses.
  */
 protected void search(List<AtomicReaderContext> leaves, Weight weight, Collector collector)
 throws IOException {
@@ -577,7 +592,8 @@ public class IndexSearcher {
 }

 /** Expert: called to re-write queries into primitive queries.
- * @throws BooleanQuery.TooManyClauses
+ * @throws BooleanQuery.TooManyClauses If a query would exceed
+ * {@link BooleanQuery#getMaxClauseCount()} clauses.
  */
 public Query rewrite(Query original) throws IOException {
 Query query = original;
@@ -609,7 +625,8 @@ public class IndexSearcher {
  * Computing an explanation is as expensive as executing the query over the
  * entire index.
  * <p>Applications should call {@link IndexSearcher#explain(Query, int)}.
- * @throws BooleanQuery.TooManyClauses
+ * @throws BooleanQuery.TooManyClauses If a query would exceed
+ * {@link BooleanQuery#getMaxClauseCount()} clauses.
  */
 protected Explanation explain(Weight weight, int doc) throws IOException {
 int n = ReaderUtil.subIndex(doc, leafContexts);

@@ -86,8 +86,6 @@ public class MultiPhraseQuery extends Query {
  * Allows to specify the relative position of terms within the phrase.
  *
  * @see PhraseQuery#add(Term, int)
- * @param terms
- * @param position
  */
 public void add(Term[] terms, int position) {
 if (termArrays.size() == 0)

@@ -35,7 +35,7 @@ public class NGramPhraseQuery extends PhraseQuery {

 /**
  * Constructor that takes gram size.
- * @param n
+ * @param n n-gram size
  */
 public NGramPhraseQuery(int n){
 super();

@@ -90,8 +90,6 @@ public class PhraseQuery extends Query {
  * This allows e.g. phrases with more than one term at the same position
  * or phrases with gaps (e.g. in connection with stopwords).
  *
- * @param term
- * @param position
  */
 public void add(Term term, int position) {
 if (terms.size() == 0) {

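
For context, PhraseQuery#add(Term, int), whose javadoc is trimmed above, is the form used for phrases with positional gaps or stacked terms. A small sketch, not part of the patch; the field name "body" and the positions are invented for illustration.

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.PhraseQuery;

    public class PhrasePositionsSketch {
      static PhraseQuery withGap() {
        PhraseQuery pq = new PhraseQuery();
        pq.add(new Term("body", "new"), 0);
        pq.add(new Term("body", "york"), 1);
        // Position 2 is skipped, modelling a stopword that was removed at index time.
        pq.add(new Term("body", "pizza"), 3);
        return pq;
      }
    }
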
@@ -79,7 +79,7 @@ public final class SearcherManager extends ReferenceManager<IndexSearcher> {
  * don't require the searcher to be warmed before going live or other
  * custom behavior.
  *
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public SearcherManager(IndexWriter writer, boolean applyAllDeletes, SearcherFactory searcherFactory) throws IOException {
 if (searcherFactory == null) {
@@ -96,7 +96,7 @@ public final class SearcherManager extends ReferenceManager<IndexSearcher> {
  * <code>null</code> if you don't require the searcher to be warmed
  * before going live or other custom behavior.
  *
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public SearcherManager(Directory dir, SearcherFactory searcherFactory) throws IOException {
 if (searcherFactory == null) {

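
For context, SearcherManager, whose constructor javadocs are fixed above, is the usual way to share IndexSearcher instances across threads. A minimal sketch, not part of the patch; in real code the manager would be long-lived and closed on shutdown rather than created per call.

    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.SearcherManager;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.Directory;

    public class SearcherManagerSketch {
      static TopDocs searchOnce(Directory dir, Query query) throws Exception {
        SearcherManager mgr = new SearcherManager(dir, null); // null = no custom SearcherFactory
        IndexSearcher searcher = mgr.acquire();
        try {
          return searcher.search(query, 10);
        } finally {
          mgr.release(searcher); // always release; never close the searcher directly
        }
      }
    }
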
@@ -96,7 +96,6 @@ public class TimeLimitingCollector extends Collector {
  * </pre>
  * </p>
  * @see #setBaseline()
- * @param clockTime
  */
 public void setBaseline(long clockTime) {
 t0 = clockTime;

@@ -1060,7 +1060,7 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
  * the given {@link Scorer} in {@link #setScorer(Scorer)}.
  * @return a {@link TopFieldCollector} instance which will sort the results by
  * the sort criteria.
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public static TopFieldCollector create(Sort sort, int numHits,
 boolean fillFields, boolean trackDocScores, boolean trackMaxScore,
@@ -1105,7 +1105,7 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
  * the given {@link Scorer} in {@link #setScorer(Scorer)}.
  * @return a {@link TopFieldCollector} instance which will sort the results by
  * the sort criteria.
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public static TopFieldCollector create(Sort sort, int numHits, FieldDoc after,
 boolean fillFields, boolean trackDocScores, boolean trackMaxScore,

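
For context, TopFieldCollector.create, whose javadocs are fixed above, is typically wired up as below. Illustrative sketch only, not part of the patch; the sort field name ("date") is invented and the order/meaning of the boolean arguments reflects my reading of the 4.x signature, so verify against the actual method before relying on it.

    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.search.TopFieldCollector;

    public class TopFieldCollectorSketch {
      // Collects the top 20 hits sorted by a numeric "date" field, newest first.
      static TopDocs topByDate(IndexSearcher searcher, Query query) throws Exception {
        Sort sort = new Sort(new SortField("date", SortField.Type.LONG, true));
        TopFieldCollector collector = TopFieldCollector.create(
            sort, 20,
            true,   // fillFields: populate FieldDoc.fields in the results
            false,  // trackDocScores
            false,  // trackMaxScore
            false); // docsScoredInOrder
        searcher.search(query, collector);
        return collector.topDocs();
      }
    }
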
@@ -108,7 +108,7 @@ public abstract class Weight {
  * but possibly filtering other documents)
  *
  * @return a {@link Scorer} which scores documents in/out-of order.
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public abstract Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
 boolean topScorer, Bits acceptDocs) throws IOException;

@@ -69,9 +69,9 @@ public class PayloadSpanUtil {
 /**
  * Query should be rewritten for wild/fuzzy support.
  *
- * @param query
+ * @param query rewritten query
  * @return payloads Collection
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public Collection<byte[]> getPayloadsForQuery(Query query) throws IOException {
 Collection<byte[]> payloads = new ArrayList<byte[]>();

@@ -139,7 +139,7 @@ public class PayloadTermQuery extends SpanTermQuery {
 /**
  *
  * @return {@link #getSpanScore()} * {@link #getPayloadScore()}
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 @Override
 public float score() throws IOException {
@@ -154,7 +154,7 @@ public class PayloadTermQuery extends SpanTermQuery {
  * Should not be overridden without good cause!
  *
  * @return the score for just the Span part w/o the payload
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  *
  * @see #score()
  */

@@ -179,7 +179,7 @@ public abstract class Similarity {
  * @param weight collection information from {@link #computeWeight(float, CollectionStatistics, TermStatistics...)}
  * @param context segment of the inverted index to be scored.
  * @return ExactSimScorer for scoring documents across <code>context</code>
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public abstract ExactSimScorer exactSimScorer(SimWeight weight, AtomicReaderContext context) throws IOException;

@@ -188,7 +188,7 @@ public abstract class Similarity {
  * @param weight collection information from {@link #computeWeight(float, CollectionStatistics, TermStatistics...)}
  * @param context segment of the inverted index to be scored.
  * @return SloppySimScorer for scoring documents across <code>context</code>
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public abstract SloppySimScorer sloppySimScorer(SimWeight weight, AtomicReaderContext context) throws IOException;

@@ -219,8 +219,6 @@ public class NearSpansOrdered extends Spans {
 }

 /** Check whether two Spans in the same document are ordered.
- * @param spans1
- * @param spans2
  * @return true iff spans1 starts before spans2
  * or the spans start at the same position,
  * and spans1 ends before spans2.

@@ -241,7 +241,7 @@ public class NearSpansUnordered extends Spans {
 /**
  * WARNING: The List is not necessarily in order of the the positions
  * @return Collection of <code>byte[]</code> payloads
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 @Override
 public Collection<byte[]> getPayload() throws IOException {

@@ -69,7 +69,7 @@ public abstract class Spans {
  * @lucene.experimental
  *
  * @return a List of byte arrays containing the data of this payload, otherwise null if isPayloadAvailable is false
- * @throws java.io.IOException
+ * @throws IOException if there is a low-level I/O error
  */
 // TODO: Remove warning after API has been finalized
 public abstract Collection<byte[]> getPayload() throws IOException;

@@ -215,14 +215,14 @@ public final class CompoundFileDirectory extends Directory {
 }

 /** Not implemented
- * @throws UnsupportedOperationException */
+ * @throws UnsupportedOperationException always: not supported by CFS */
 @Override
 public void deleteFile(String name) {
 throw new UnsupportedOperationException();
 }

 /** Not implemented
- * @throws UnsupportedOperationException */
+ * @throws UnsupportedOperationException always: not supported by CFS */
 public void renameFile(String from, String to) {
 throw new UnsupportedOperationException();
 }
@@ -253,7 +253,7 @@ public final class CompoundFileDirectory extends Directory {
 }

 /** Not implemented
- * @throws UnsupportedOperationException */
+ * @throws UnsupportedOperationException always: not supported by CFS */
 @Override
 public Lock makeLock(String name) {
 throw new UnsupportedOperationException();

@@ -135,7 +135,7 @@ public abstract class FSDirectory extends Directory {
  * @param path the path of the directory
  * @param lockFactory the lock factory to use, or null for the default
  * ({@link NativeFSLockFactory});
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 protected FSDirectory(File path, LockFactory lockFactory) throws IOException {
 // new ctors use always NativeFSLockFactory as default:

@@ -90,7 +90,7 @@ public class MMapDirectory extends FSDirectory {
  * @param path the path of the directory
  * @param lockFactory the lock factory to use, or null for the default
  * ({@link NativeFSLockFactory});
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public MMapDirectory(File path, LockFactory lockFactory) throws IOException {
 this(path, lockFactory, DEFAULT_MAX_BUFF);
@@ -99,7 +99,7 @@ public class MMapDirectory extends FSDirectory {
 /** Create a new MMapDirectory for the named location and {@link NativeFSLockFactory}.
  *
  * @param path the path of the directory
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public MMapDirectory(File path) throws IOException {
 this(path, null);
@@ -123,7 +123,7 @@ public class MMapDirectory extends FSDirectory {
  * be {@code 1 << 30}, as the address space is big enough.
  * <p>
  * <b>Please note:</b> The chunk size is always rounded down to a power of 2.
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public MMapDirectory(File path, LockFactory lockFactory, int maxChunkSize) throws IOException {
 super(path, lockFactory);

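
For context, the maxChunkSize constructor documented above might be used like this. Sketch only, not part of the patch; the 1 << 28 (256 MB) chunk size is an arbitrary example value.

    import java.io.File;
    import org.apache.lucene.store.MMapDirectory;

    public class MMapDirectorySketch {
      // Opens an index with memory-mapped I/O; the chunk size is rounded down to a power of two.
      static MMapDirectory open(File indexPath) throws Exception {
        return new MMapDirectory(indexPath, null, 1 << 28); // null = default NativeFSLockFactory
      }
    }
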
@@ -58,7 +58,7 @@ public class NIOFSDirectory extends FSDirectory {
  * @param path the path of the directory
  * @param lockFactory the lock factory to use, or null for the default
  * ({@link NativeFSLockFactory});
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public NIOFSDirectory(File path, LockFactory lockFactory) throws IOException {
 super(path, lockFactory);
@@ -67,7 +67,7 @@ public class NIOFSDirectory extends FSDirectory {
 /** Create a new NIOFSDirectory for the named location and {@link NativeFSLockFactory}.
  *
  * @param path the path of the directory
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public NIOFSDirectory(File path) throws IOException {
 super(path, null);

@@ -35,7 +35,7 @@ public class SimpleFSDirectory extends FSDirectory {
  * @param path the path of the directory
  * @param lockFactory the lock factory to use, or null for the default
  * ({@link NativeFSLockFactory});
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public SimpleFSDirectory(File path, LockFactory lockFactory) throws IOException {
 super(path, lockFactory);
@@ -44,7 +44,7 @@ public class SimpleFSDirectory extends FSDirectory {
 /** Create a new SimpleFSDirectory for the named location and {@link NativeFSLockFactory}.
  *
  * @param path the path of the directory
- * @throws IOException
+ * @throws IOException if there is a low-level I/O error
  */
 public SimpleFSDirectory(File path) throws IOException {
 super(path, null);

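
For context, the FSDirectory constructors touched above are usually reached either through FSDirectory.open, which picks an implementation for the platform, or by instantiating a concrete subclass directly. A brief sketch, not part of the patch.

    import java.io.File;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.store.NIOFSDirectory;

    public class DirectorySketch {
      static Directory openDefault(File path) throws Exception {
        return FSDirectory.open(path); // chooses an MMap/NIO/Simple implementation for the platform
      }

      static Directory openNio(File path) throws Exception {
        return new NIOFSDirectory(path, null); // null = default NativeFSLockFactory
      }
    }
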
@@ -262,7 +262,7 @@ public final class BytesRef implements Comparable<BytesRef>,Cloneable {
 }
 }

- /** @deprecated */
+ /** @deprecated This comparator is only a transition mechanism */
 @Deprecated
 private final static Comparator<BytesRef> utf8SortedAsUTF16SortOrder = new UTF8SortedAsUTF16Comparator();

@@ -272,7 +272,7 @@ public final class BytesRef implements Comparable<BytesRef>,Cloneable {
 return utf8SortedAsUTF16SortOrder;
 }

- /** @deprecated */
+ /** @deprecated This comparator is only a transition mechanism */
 @Deprecated
 private static class UTF8SortedAsUTF16Comparator implements Comparator<BytesRef> {
 // Only singleton

@@ -34,7 +34,7 @@ public interface BytesRefIterator {
  *
  * @return the next {@link BytesRef} in the iterator or <code>null</code> if
  * the end of the iterator is reached.
- * @throws IOException
+ * @throws IOException If there is a low-level I/O error.
  */
 public BytesRef next() throws IOException;

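
For context, the null-at-the-end contract that this @return/@throws cleanup documents leads to the standard consumption loop below. Sketch only, not part of the patch.

    import java.io.IOException;
    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.BytesRefIterator;

    public class BytesRefIteratorSketch {
      // next() returns null once the iterator is exhausted, as the javadoc above states.
      static int count(BytesRefIterator it) throws IOException {
        int n = 0;
        for (BytesRef term = it.next(); term != null; term = it.next()) {
          n++;
        }
        return n;
      }
    }
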
@@ -217,7 +217,7 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence, Clone
 return new CharsRef(chars, offset + start, offset + end);
 }

- /** @deprecated */
+ /** @deprecated This comparator is only a transition mechanism */
 @Deprecated
 private final static Comparator<CharsRef> utf16SortedAsUTF8SortOrder = new UTF16SortedAsUTF8Comparator();

@@ -227,7 +227,7 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence, Clone
 return utf16SortedAsUTF8SortOrder;
 }

- /** @deprecated */
+ /** @deprecated This comparator is only a transition mechanism */
 @Deprecated
 private static class UTF16SortedAsUTF8Comparator implements Comparator<CharsRef> {
 // Only singleton

@@ -62,7 +62,7 @@ public final class CommandLineUtil {
  * Loads a specific Directory implementation
  * @param clazzName The name of the Directory class to load
  * @return The Directory class loaded
- * @throws ClassNotFoundException
+ * @throws ClassNotFoundException If the specified class cannot be found.
  */
 public static Class<? extends Directory> loadDirectoryClass(String clazzName)
 throws ClassNotFoundException {
@@ -73,7 +73,7 @@ public final class CommandLineUtil {
  * Loads a specific FSDirectory implementation
  * @param clazzName The name of the FSDirectory class to load
  * @return The FSDirectory class loaded
- * @throws ClassNotFoundException
+ * @throws ClassNotFoundException If the specified class cannot be found.
  */
 public static Class<? extends FSDirectory> loadFSDirectoryClass(String clazzName)
 throws ClassNotFoundException {
@@ -97,10 +97,10 @@ public final class CommandLineUtil {
  * @param clazz The class of the object to be created
  * @param file The file to be used as parameter constructor
  * @return The new FSDirectory instance
- * @throws NoSuchMethodException
- * @throws InstantiationException
- * @throws IllegalAccessException
- * @throws InvocationTargetException
+ * @throws NoSuchMethodException If the Directory does not have a constructor that takes <code>File</code>.
+ * @throws InstantiationException If the class is abstract or an interface.
+ * @throws IllegalAccessException If the constructor does not have public visibility.
+ * @throws InvocationTargetException If the constructor throws an exception
  */
 public static FSDirectory newFSDirectory(Class<? extends FSDirectory> clazz, File file)
 throws NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException {

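
For context, the two CommandLineUtil methods documented above are meant to be combined roughly as below, e.g. when a tool takes the directory implementation as a command-line argument. Sketch only, not part of the patch; the class name shown is just an example input.

    import java.io.File;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.util.CommandLineUtil;

    public class CommandLineUtilSketch {
      // className would be something like "org.apache.lucene.store.NIOFSDirectory".
      static FSDirectory openByName(String className, File path) throws Exception {
        Class<? extends FSDirectory> clazz = CommandLineUtil.loadFSDirectoryClass(className);
        return CommandLineUtil.newFSDirectory(clazz, path);
      }
    }
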
@@ -81,9 +81,7 @@ public class OpenBitSet extends DocIdSet implements Bits, Cloneable {
 // Used only for assert:
 private long numBits;

- /** Constructs an OpenBitSet large enough to hold numBits.
- *
- * @param numBits
+ /** Constructs an OpenBitSet large enough to hold <code>numBits</code>.
  */
 public OpenBitSet(long numBits) {
 this.numBits = numBits;

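
For context, the OpenBitSet constructor documented above is used along these lines. Sketch only, not part of the patch; the capacity and bit indices are arbitrary examples.

    import org.apache.lucene.util.OpenBitSet;

    public class OpenBitSetSketch {
      static void demo() {
        OpenBitSet bits = new OpenBitSet(1000); // room for bits 0..999
        bits.set(3);
        bits.set(64);
        System.out.println(bits.get(3));        // true
        System.out.println(bits.get(4));        // false
        System.out.println(bits.cardinality()); // 2
      }
    }
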
@@ -712,7 +712,7 @@ public class PackedInts {
  * @param valueCount how many values the stream holds
  * @param bitsPerValue the number of bits per value
  * @return a Reader
- * @throws IOException
+ * @throws IOException If there is a low-level I/O error
  * @see PackedInts#getWriterNoHeader(DataOutput, Format, int, int, int)
  * @lucene.internal
  */
@@ -754,7 +754,7 @@ public class PackedInts {
  *
  * @param in the stream to read data from
  * @return a Reader
- * @throws IOException
+ * @throws IOException If there is a low-level I/O error
  * @lucene.internal
  */
 public static Reader getReader(DataInput in) throws IOException {
@@ -846,7 +846,7 @@ public class PackedInts {
  *
  * @param in the stream to read data from
  * @return a direct Reader
- * @throws IOException
+ * @throws IOException If there is a low-level I/O error
  * @lucene.internal
  */
 public static Reader getDirectReader(IndexInput in) throws IOException {
@@ -990,7 +990,7 @@ public class PackedInts {
  * @param bitsPerValue the number of bits per value
  * @param acceptableOverheadRatio an acceptable overhead ratio per value
  * @return a Writer
- * @throws IOException
+ * @throws IOException If there is a low-level I/O error
  * @lucene.internal
  */
 public static Writer getWriter(DataOutput out,

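
For context, a possible round trip through the PackedInts reader/writer entry points documented above is sketched below. This is not part of the patch, and the getWriter argument order (output, value count, bits per value, acceptable overhead ratio) as well as the RAMDirectory/IOContext plumbing are assumptions based on the 4.x API rather than anything stated in this diff.

    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.IOContext;
    import org.apache.lucene.store.IndexInput;
    import org.apache.lucene.store.IndexOutput;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.packed.PackedInts;

    public class PackedIntsSketch {
      static void roundTrip() throws Exception {
        Directory dir = new RAMDirectory();
        IndexOutput out = dir.createOutput("values.bin", IOContext.DEFAULT);
        // Three values, each packed into 7 bits (enough for 0..127).
        PackedInts.Writer writer = PackedInts.getWriter(out, 3, 7, PackedInts.DEFAULT);
        writer.add(5);
        writer.add(100);
        writer.add(42);
        writer.finish();
        out.close();

        IndexInput in = dir.openInput("values.bin", IOContext.DEFAULT);
        PackedInts.Reader reader = PackedInts.getReader(in);
        System.out.println(reader.get(1)); // 100
        in.close();
      }
    }
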
@@ -33,7 +33,6 @@ public class TestBitVector extends LuceneTestCase

 /**
  * Test the default constructor on BitVectors of various sizes.
- * @throws Exception
  */
 public void testConstructSize() throws Exception {
 doTestConstructOfSize(8);
@@ -49,7 +48,6 @@ public class TestBitVector extends LuceneTestCase

 /**
  * Test the get() and set() methods on BitVectors of various sizes.
- * @throws Exception
  */
 public void testGetSet() throws Exception {
 doTestGetSetVectorOfSize(8);
@@ -70,7 +68,6 @@ public class TestBitVector extends LuceneTestCase

 /**
  * Test the clear() method on BitVectors of various sizes.
- * @throws Exception
  */
 public void testClear() throws Exception {
 doTestClearVectorOfSize(8);
@@ -93,7 +90,6 @@ public class TestBitVector extends LuceneTestCase

 /**
  * Test the count() method on BitVectors of various sizes.
- * @throws Exception
  */
 public void testCount() throws Exception {
 doTestCountVectorOfSize(8);
@@ -129,7 +125,6 @@ public class TestBitVector extends LuceneTestCase

 /**
  * Test writing and construction to/from Directory.
- * @throws Exception
  */
 public void testWriteRead() throws Exception {
 doTestWriteRead(8);

@@ -41,8 +41,6 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {

 /**
  * LUCENE-3627: This test fails.
- *
- * @throws Exception
  */
 public void testCrashCorruptsIndexing() throws Exception {
 path = _TestUtil.getTempDir("testCrashCorruptsIndexing");
@@ -60,8 +58,6 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
  * index 1 document and commit.
  * prepare for crashing.
  * index 1 more document, and upon commit, creation of segments_2 will crash.
- *
- * @throws IOException
  */
 private void indexAndCrashOnCreateOutputSegments2() throws IOException {
 Directory realDirectory = FSDirectory.open(path);
@@ -93,8 +89,6 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {

 /**
  * Attempts to index another 1 document.
- *
- * @throws IOException
  */
 private void indexAfterRestart() throws IOException {
 Directory realDirectory = newFSDirectory(path);
@@ -115,8 +109,6 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {

 /**
  * Run an example search.
- *
- * @throws IOException
  */
 private void searchForFleas(final int expectedTotalHits) throws IOException {
 Directory realDirectory = newFSDirectory(path);

@@ -644,8 +644,6 @@ public class TestIndexWriter extends LuceneTestCase {
  * Test that no NullPointerException will be raised,
  * when adding one document with a single, empty field
  * and term vectors enabled.
- * @throws IOException
- *
  */
 public void testBadSegment() throws IOException {
 Directory dir = newDirectory();

@@ -223,8 +223,6 @@ public class TestIndexWriterReader extends LuceneTestCase {

 /**
  * Test using IW.addIndexes
- *
- * @throws Exception
  */
 public void testAddIndexes() throws Exception {
 boolean doFullMerge = false;
@@ -310,8 +308,6 @@ public class TestIndexWriterReader extends LuceneTestCase {

 /**
  * Deletes using IW.deleteDocuments
- *
- * @throws Exception
  */
 public void testDeleteFromIndexWriter() throws Exception {
 boolean doFullMerge = true;

@@ -56,8 +56,6 @@ public class TestIsCurrent extends LuceneTestCase {

 /**
  * Failing testcase showing the trouble
- *
- * @throws IOException
  */
 @Test
 public void testDeleteByTermIsCurrent() throws IOException {
@@ -83,8 +81,6 @@ public class TestIsCurrent extends LuceneTestCase {

 /**
  * Testcase for example to show that writer.deleteAll() is working as expected
- *
- * @throws IOException
  */
 @Test
 public void testDeleteAllIsCurrent() throws IOException {

@@ -36,8 +36,6 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 /**
  * Creates two empty indexes and wraps a ParallelReader around. Adding this
  * reader to a new index should not throw any exception.
- *
- * @throws IOException
  */
 public void testEmptyIndex() throws IOException {
 Directory rd1 = newDirectory();

@@ -59,8 +59,6 @@ public class TestBooleanOr extends LuceneTestCase {
 /**
  * <code>T:files T:deleting C:production C:optimize </code>
  * it works.
- *
- * @throws IOException
  */
 public void testFlat() throws IOException {
 BooleanQuery q = new BooleanQuery();
@@ -74,8 +72,6 @@ public class TestBooleanOr extends LuceneTestCase {
 /**
  * <code>(T:files T:deleting) (+C:production +C:optimize)</code>
  * it works.
- *
- * @throws IOException
  */
 public void testParenthesisMust() throws IOException {
 BooleanQuery q3 = new BooleanQuery();
@@ -93,8 +89,6 @@ public class TestBooleanOr extends LuceneTestCase {
 /**
  * <code>(T:files T:deleting) +(C:production C:optimize)</code>
  * not working. results NO HIT.
- *
- * @throws IOException
  */
 public void testParenthesisMust2() throws IOException {
 BooleanQuery q3 = new BooleanQuery();
@@ -112,8 +106,6 @@ public class TestBooleanOr extends LuceneTestCase {
 /**
  * <code>(T:files T:deleting) (C:production C:optimize)</code>
  * not working. results NO HIT.
- *
- * @throws IOException
  */
 public void testParenthesisShould() throws IOException {
 BooleanQuery q3 = new BooleanQuery();

@@ -152,8 +152,6 @@ public class TestCustomSearcherSort extends LuceneTestCase {

 /**
  * Check the hits for duplicates.
- *
- * @param hits
  */
 private void checkHits(ScoreDoc[] hits, String prefix) {
 if (hits != null) {
@@ -186,9 +184,6 @@ public class TestCustomSearcherSort extends LuceneTestCase {
 public class CustomSearcher extends IndexSearcher {
 private int switcher;

- /**
- * @param r
- */
 public CustomSearcher(IndexReader r, int switcher) {
 super(r);
 this.switcher = switcher;

@@ -269,7 +269,6 @@ public class TestWildcard
  * This test looks at both parsing and execution of wildcard queries.
  * Although placed here, it also tests prefix queries, verifying that
  * prefix queries are not parsed into wild card queries, and viceversa.
- * @throws Exception
  */
 public void testParsingAndSearching() throws Exception {
 String field = "content";

@@ -113,7 +113,6 @@ public class PayloadHelper {
  * @param similarity The Similarity class to use in the Searcher
  * @param numDocs The num docs to add
  * @return An IndexSearcher
- * @throws IOException
  */
 // TODO: randomize
 public IndexSearcher setUp(Random random, Similarity similarity, int numDocs) throws IOException {

@@ -86,7 +86,6 @@ public class TestSpansAdvanced extends LuceneTestCase {
  * @param writer the Lucene index writer
  * @param id the unique id of the document
  * @param text the text of the document
- * @throws IOException
  */
 protected void addDocument(final RandomIndexWriter writer, final String id,
 final String text) throws IOException {
@@ -99,8 +98,6 @@ public class TestSpansAdvanced extends LuceneTestCase {

 /**
  * Tests two span queries.
- *
- * @throws IOException
  */
 public void testBooleanQueryWithSpanQueries() throws IOException {

@@ -109,8 +106,6 @@ public class TestSpansAdvanced extends LuceneTestCase {

 /**
  * Tests two span queries.
- *
- * @throws IOException
  */
 protected void doTestBooleanQueryWithSpanQueries(IndexSearcher s,
 final float expectedScore) throws IOException {
@@ -132,8 +127,6 @@ public class TestSpansAdvanced extends LuceneTestCase {
  * @param description the description of the search
  * @param expectedIds the expected document ids of the hits
  * @param expectedScores the expected scores of the hits
- *
- * @throws IOException
  */
 protected static void assertHits(IndexSearcher s, Query query,
 final String description, final String[] expectedIds,

@@ -72,8 +72,6 @@ public class TestSpansAdvanced2 extends TestSpansAdvanced {

 /**
  * Verifies that the index has the correct number of documents.
- *
- * @throws Exception
  */
 public void testVerifyIndex() throws Exception {
 final IndexReader reader = DirectoryReader.open(mDirectory);
@@ -83,8 +81,6 @@ public class TestSpansAdvanced2 extends TestSpansAdvanced {

 /**
  * Tests a single span query that matches multiple documents.
- *
- * @throws IOException
  */
 public void testSingleSpanQuery() throws IOException {

@@ -99,8 +95,6 @@ public class TestSpansAdvanced2 extends TestSpansAdvanced {

 /**
  * Tests a single span query that matches multiple documents.
- *
- * @throws IOException
  */
 public void testMultipleDifferentSpanQueries() throws IOException {

@@ -119,8 +113,6 @@ public class TestSpansAdvanced2 extends TestSpansAdvanced {

 /**
  * Tests two span queries.
- *
- * @throws IOException
  */
 @Override
 public void testBooleanQueryWithSpanQueries() throws IOException {

@@ -37,7 +37,6 @@ import org.apache.lucene.util._TestUtil;
 public class TestFileSwitchDirectory extends LuceneTestCase {
 /**
  * Test if writing doc stores to disk and everything else to ram works.
- * @throws IOException
  */
 public void testBasic() throws IOException {
 Set<String> fileExtensions = new HashSet<String>();

@@ -140,7 +140,7 @@ public class IndexFiles {
  *
  * @param writer Writer to the index where the given file/dir info will be stored
  * @param file The file to index, or the directory to recurse into to find files to index
- * @throws IOException
+ * @throws IOException If there is a low-level I/O error
  */
 static void indexDocs(IndexWriter writer, File file)
 throws IOException {

@@ -43,7 +43,6 @@ import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 public class EnhancementsDocumentBuilder extends CategoryDocumentBuilder {

 /**
- * @param taxonomyWriter
  * @param params
  * Indexing params which include {@link CategoryEnhancement}s.
  */

@@ -50,7 +50,7 @@ public class EnhancementsPayloadIterator extends PayloadIterator {
  * A reader of the index.
  * @param term
  * The category term to iterate.
- * @throws IOException
+ * @throws IOException If there is a low-level I/O error.
  */
 public EnhancementsPayloadIterator(
 List<CategoryEnhancement> enhancementsList,

@@ -204,7 +204,7 @@ public class CategoryContainer implements Iterable<CategoryAttribute>, Serializa
  * {@link ObjectOutputStream}.<br>
  * NOTE: {@link CategoryProperty}s are {@link Serializable}, but do not
  * assume that Lucene's {@link Attribute}s are as well
- * @throws IOException
+ * @throws IOException If there is a low-level I/O error.
  */
 protected void serializeCategoryAttribute(ObjectOutputStream out,
 CategoryAttribute ca) throws IOException {

@@ -128,7 +128,7 @@ public class CategoryDocumentBuilder {
    * @return This CategoryDocumentBuilder, to enable this one line call:
    *         {@code new} {@link #CategoryDocumentBuilder(TaxonomyWriter)}.
    *         {@link #setCategoryPaths(Iterable)}.{@link #build(Document)}.
-   * @throws IOException
+   * @throws IOException If there is a low-level I/O error.
    */
   public CategoryDocumentBuilder setCategoryPaths(
       Iterable<CategoryPath> categoryPaths) throws IOException {

@@ -150,7 +150,7 @@ public class CategoryDocumentBuilder {
    * @return This CategoryDocumentBuilder, to enable this one line call:
    *         {@code new} {@link #CategoryDocumentBuilder(TaxonomyWriter)}.
    *         {@link #setCategories(Iterable)}.{@link #build(Document)}.
-   * @throws IOException
+   * @throws IOException If there is a low-level I/O error.
    */
   public CategoryDocumentBuilder setCategories(
       Iterable<CategoryAttribute> categories) throws IOException {

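Both @return tags above spell out the intended fluent, one-line use of the builder. For orientation only, a sketch of that call; the category values are made up and the org.apache.lucene.facet.index package name for the builder is an assumption.

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.lucene.document.Document;
import org.apache.lucene.facet.index.CategoryDocumentBuilder; // package name assumed
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;

final class CategoryBuilderUsage {
  /** Attach facet categories to a document before it is handed to an IndexWriter. */
  static void addCategories(TaxonomyWriter taxonomyWriter, Document doc) throws IOException {
    List<CategoryPath> paths = Arrays.asList(
        new CategoryPath("author", "Mark Twain"), // illustrative categories
        new CategoryPath("date", "2010", "March"));

    // The "one line call" described in the javadoc above.
    new CategoryDocumentBuilder(taxonomyWriter).setCategoryPaths(paths).build(doc);
  }
}
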
@@ -277,7 +277,7 @@ public class CategoryDocumentBuilder {
    *            relevant data.
    * @return The {@link CategoryTokenizer} to be used in creating category
    *         tokens.
-   * @throws IOException
+   * @throws IOException If there is a low-level I/O error.
    */
   protected CategoryTokenizer getCategoryTokenizer(TokenStream categoryStream)
       throws IOException {

@@ -95,9 +95,6 @@ public class PerDimensionIndexingParams extends DefaultFacetIndexingParams {
   /**
    * Add a CategoryListParams for a given CategoryPath's dimension or
    * "zero-th" category.
-   *
-   * @param category
-   * @param clParams
    */
   public void addCategoryListParams(CategoryPath category, CategoryListParams clParams) {
     clParamsMap.put(category.getComponent(0), clParams);

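As the method body shows, only the first path component (the dimension) becomes the map key. A hedged sketch of registering a per-dimension category list follows; the "$facets" term, the Term-based CategoryListParams constructor, and the package names are assumptions made for illustration.

import org.apache.lucene.facet.index.params.CategoryListParams; // package name assumed
import org.apache.lucene.facet.index.params.PerDimensionIndexingParams; // package name assumed
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.index.Term;

final class PerDimensionSetup {
  /** Keep the "author" dimension in its own category list (illustrative). */
  static void registerAuthorList(PerDimensionIndexingParams params) {
    // Only getComponent(0), i.e. "author", ends up as the map key.
    // The Term-based CategoryListParams constructor is an assumption here.
    params.addCategoryListParams(
        new CategoryPath("author"),
        new CategoryListParams(new Term("$facets", "author")));
  }
}
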
@@ -81,7 +81,7 @@ public abstract class FacetsAccumulator {
   public abstract List<FacetResult> accumulate(ScoredDocIDs docids) throws IOException;

   /**
-   * @return the complement threshold
+   * Returns the complement threshold.
    * @see #setComplementThreshold(double)
    */
   public double getComplementThreshold() {

@@ -107,6 +107,7 @@ public abstract class FacetsAccumulator {
    * <p>
    * To disable complements pass {@link #DISABLE_COMPLEMENT}.
    * @param complementThreshold the complement threshold to set
+   * @see #getComplementThreshold()
    */
   public void setComplementThreshold(double complementThreshold) {
     this.complementThreshold = complementThreshold;

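The setter's javadoc points at DISABLE_COMPLEMENT as the off switch. A small sketch of tuning the threshold on an existing accumulator; the 0.1 value is an arbitrary illustration and the org.apache.lucene.facet.search package name is an assumption.

import org.apache.lucene.facet.search.FacetsAccumulator; // package name assumed

final class ComplementTuning {
  /** Turn the complement-counting optimization off for this accumulator. */
  static void disableComplements(FacetsAccumulator accumulator) {
    accumulator.setComplementThreshold(FacetsAccumulator.DISABLE_COMPLEMENT);
  }

  /** Or pick a custom threshold (illustrative value). */
  static void useCustomThreshold(FacetsAccumulator accumulator) {
    accumulator.setComplementThreshold(0.1);
  }
}
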
@@ -88,18 +88,11 @@ public class SamplingWrapper extends FacetsAccumulator {
     return fixedRes;
   }

-  /**
-   * @see FacetsAccumulator#getComplementThreshold()
-   */
   @Override
   public double getComplementThreshold() {
     return delegee.getComplementThreshold();
   }

-  /**
-   * @param complementThreshold
-   * @see FacetsAccumulator#setComplementThreshold(double)
-   */
   @Override
   public void setComplementThreshold(double complementThreshold) {
     delegee.setComplementThreshold(complementThreshold);

@@ -230,10 +230,7 @@ public class StandardFacetsAccumulator extends FacetsAccumulator {
   /**
    * Iterate over the documents for this partition and fill the facet arrays with the correct
    * count/complement count/value.
-   * @param docids
-   * @param facetArrays
-   * @param partition
-   * @throws IOException
+   * @throws IOException If there is a low-level I/O error.
    */
   private final void fillArraysForPartition(ScoredDocIDs docids,
       FacetArrays facetArrays, int partition) throws IOException {