mirror of https://github.com/apache/lucene.git

Upgrade google java format and apply tidy (#11811)

parent 8bdfa90ea9
commit 54fba99cb1
@@ -37,7 +37,7 @@ configure(project(":lucene").subprojects) { prj ->

       lineEndings 'UNIX'
       endWithNewline()
-      googleJavaFormat('1.13.0')
+      googleJavaFormat('1.15.0')

       // Apply to all Java sources
       target "src/**/*.java"
@@ -120,6 +120,7 @@ Other
 ---------------------
 * LUCENE-10423: Remove usages of System.currentTimeMillis() from tests. (Marios Trivyzas)

+* GITHUB#11811: upgrade google java format to 1.15.0 (Dawid Weiss)

 ======================== Lucene 9.4.0 =======================

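Almost every hunk below is the same mechanical change: google-java-format 1.15 rewrites a Javadoc comment consisting only of block tags (@return, @param, @see, @throws, @since, @lucene.internal) from a single line onto multiple lines. A minimal sketch of that transformation on a hypothetical class (not part of this commit):

  class JavadocStyleExample {
    private int count;

    // Single-line form, accepted by google-java-format 1.13:
    //   /** @return the current count */

    // Multi-line form, emitted by google-java-format 1.15:
    /**
     * @return the current count
     */
    int count() {
      return count;
    }
  }
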
@@ -81,7 +81,9 @@ public final class ClassicAnalyzer extends StopwordAnalyzerBase {
     maxTokenLength = length;
   }

-  /** @see #setMaxTokenLength */
+  /**
+   * @see #setMaxTokenLength
+   */
   public int getMaxTokenLength() {
     return maxTokenLength;
   }

@@ -88,7 +88,9 @@ public final class ClassicTokenizer extends Tokenizer {
     this.maxTokenLength = length;
   }

-  /** @see #setMaxTokenLength */
+  /**
+   * @see #setMaxTokenLength
+   */
   public int getMaxTokenLength() {
     return maxTokenLength;
   }

@@ -31,12 +31,16 @@ public class Hyphenation {
     hyphenPoints = points;
   }

-  /** @return the number of hyphenation points in the word */
+  /**
+   * @return the number of hyphenation points in the word
+   */
   public int length() {
     return hyphenPoints.length;
   }

-  /** @return the hyphenation points */
+  /**
+   * @return the hyphenation points
+   */
   public int[] getHyphenationPoints() {
     return hyphenPoints;
   }

@@ -314,7 +314,9 @@ public class PatternParser extends DefaultHandler {
     }
   }

-  /** @see org.xml.sax.ContentHandler#characters(char[], int, int) */
+  /**
+   * @see org.xml.sax.ContentHandler#characters(char[], int, int)
+   */
   @SuppressWarnings({"unchecked", "rawtypes"})
   @Override
   public void characters(char[] ch, int start, int length) {

@@ -51,7 +51,9 @@ public final class GermanStemFilter extends TokenFilter {
     super(in);
   }

-  /** @return Returns true for next token in the stream, or false at EOS */
+  /**
+   * @return Returns true for next token in the stream, or false at EOS
+   */
   @Override
   public boolean incrementToken() throws IOException {
     if (input.incrementToken()) {

@@ -79,7 +79,9 @@ public final class UAX29URLEmailAnalyzer extends StopwordAnalyzerBase {
     maxTokenLength = length;
   }

-  /** @see #setMaxTokenLength */
+  /**
+   * @see #setMaxTokenLength
+   */
   public int getMaxTokenLength() {
     return maxTokenLength;
   }

@@ -100,7 +100,9 @@ public final class UAX29URLEmailTokenizer extends Tokenizer {
     }
   }

-  /** @see #setMaxTokenLength */
+  /**
+   * @see #setMaxTokenLength
+   */
   public int getMaxTokenLength() {
     return maxTokenLength;
   }

@@ -36,22 +36,30 @@ public final class AffixedWord {
     this.suffixes = Collections.unmodifiableList(suffixes);
   }

-  /** @return the word being analyzed */
+  /**
+   * @return the word being analyzed
+   */
   public String getWord() {
     return word;
   }

-  /** @return the dictionary entry for the stem in this analysis */
+  /**
+   * @return the dictionary entry for the stem in this analysis
+   */
   public DictEntry getDictEntry() {
     return entry;
   }

-  /** @return the list of prefixes applied to the stem, at most two, outermost first */
+  /**
+   * @return the list of prefixes applied to the stem, at most two, outermost first
+   */
   public List<Affix> getPrefixes() {
     return prefixes;
   }

-  /** @return the list of suffixes applied to the stem, at most two, outermost first */
+  /**
+   * @return the list of suffixes applied to the stem, at most two, outermost first
+   */
   public List<Affix> getSuffixes() {
     return suffixes;
   }

@@ -57,7 +57,9 @@ public abstract class DictEntry {
     return Objects.hash(stem, getFlags(), getMorphologicalData());
   }

-  /** @return the stem word in the dictionary */
+  /**
+   * @return the stem word in the dictionary
+   */
   public String getStem() {
     return stem;
   }

@@ -1006,7 +1006,9 @@ public class Dictionary {
     return wordCount;
   }

-  /** @return the number of word entries written */
+  /**
+   * @return the number of word entries written
+   */
   private int writeNormalizedWordEntry(StringBuilder reuse, ByteSequencesWriter writer, String line)
       throws IOException {
     int flagSep = line.indexOf(FLAG_SEPARATOR);
@@ -1369,7 +1371,9 @@ public class Dictionary {
      */
     abstract String printFlag(char flag);

-    /** @return a presentable sorted concatenation of {@link #printFlag} results */
+    /**
+     * @return a presentable sorted concatenation of {@link #printFlag} results
+     */
     String printFlags(char[] encodedFlags) {
       List<String> printed = new ArrayList<>();
       for (char c : encodedFlags) {

@@ -41,7 +41,9 @@ public class EntrySuggestion {
     return toEdit;
   }

-  /** @return new dictionary entries to be added to accommodate the given word list */
+  /**
+   * @return new dictionary entries to be added to accommodate the given word list
+   */
   public List<DictEntry> getEntriesToAdd() {
     return toAdd;
   }

@@ -75,7 +75,9 @@ public class Hunspell {
     stemmer = new Stemmer(dictionary);
   }

-  /** @return whether the given word's spelling is considered correct according to Hunspell rules */
+  /**
+   * @return whether the given word's spelling is considered correct according to Hunspell rules
+   */
   public boolean spell(String word) {
     checkCanceled.run();
     if (word.isEmpty()) return true;

@@ -40,7 +40,9 @@ class ModifyingSuggester {
     this.wordCase = wordCase;
   }

-  /** @return whether any of the added suggestions are considered "good" */
+  /**
+   * @return whether any of the added suggestions are considered "good"
+   */
   boolean suggest() {
     String low =
         wordCase != WordCase.LOWER ? speller.dictionary.toLowerCase(misspelled) : misspelled;

@@ -39,22 +39,30 @@ public abstract class Token {
     this.type = type;
   }

-  /** @return surfaceForm */
+  /**
+   * @return surfaceForm
+   */
   public char[] getSurfaceForm() {
     return surfaceForm;
   }

-  /** @return offset into surfaceForm */
+  /**
+   * @return offset into surfaceForm
+   */
   public int getOffset() {
     return offset;
   }

-  /** @return length of surfaceForm */
+  /**
+   * @return length of surfaceForm
+   */
   public int getLength() {
     return length;
   }

-  /** @return surfaceForm as a String */
+  /**
+   * @return surfaceForm as a String
+   */
   public String getSurfaceFormString() {
     return new String(surfaceForm, offset, length);
   }

@@ -111,12 +111,16 @@ public abstract class RSLPStemmerBase {
       this.min = min;
     }

-    /** @return true if the word matches this rule. */
+    /**
+     * @return true if the word matches this rule.
+     */
     public boolean matches(char[] s, int len) {
       return (len - suffix.length >= min && endsWith(s, len, suffix));
     }

-    /** @return new valid length of the string after firing this rule. */
+    /**
+     * @return new valid length of the string after firing this rule.
+     */
     public int replace(char[] s, int len) {
       if (replacement.length > 0) {
         System.arraycopy(replacement, 0, s, len - suffix.length, replacement.length);
@@ -208,7 +212,9 @@ public abstract class RSLPStemmerBase {
       }
     }

-    /** @return new valid length of the string after applying the entire step. */
+    /**
+     * @return new valid length of the string after applying the entire step.
+     */
     public int apply(char[] s, int len) {
       if (len < min) return len;


@@ -532,7 +532,9 @@ public final class ShingleFilter extends TokenFilter {
       return value == minValue;
     }

-    /** @return the value this instance had before the last advance() call */
+    /**
+     * @return the value this instance had before the last advance() call
+     */
    public int getPreviousValue() {
       return previousValue;
     }

@@ -23,7 +23,9 @@ import org.apache.lucene.tests.analysis.BaseTokenStreamFactoryTestCase;
 import org.apache.lucene.tests.util.StringMockResourceLoader;
 import org.apache.lucene.util.Version;

-/** @since solr 1.4 */
+/**
+ * @since solr 1.4
+ */
 public class TestMultiWordSynonyms extends BaseTokenStreamFactoryTestCase {

   public void testMultiWordSynonyms() throws Exception {

@@ -62,7 +62,9 @@ final class ScriptIterator {

   private final boolean combineCJ;

-  /** @param combineCJ if true: Han,Hiragana,Katakana will all return as {@link UScript#JAPANESE} */
+  /**
+   * @param combineCJ if true: Han,Hiragana,Katakana will all return as {@link UScript#JAPANESE}
+   */
   ScriptIterator(boolean combineCJ) {
     this.combineCJ = combineCJ;
   }

@@ -58,32 +58,44 @@ public class Token extends org.apache.lucene.analysis.morph.Token {
         + ")";
   }

-  /** @return reading. null if token doesn't have reading. */
+  /**
+   * @return reading. null if token doesn't have reading.
+   */
   public String getReading() {
     return morphData.getReading(morphId, surfaceForm, offset, length);
   }

-  /** @return pronunciation. null if token doesn't have pronunciation. */
+  /**
+   * @return pronunciation. null if token doesn't have pronunciation.
+   */
   public String getPronunciation() {
     return morphData.getPronunciation(morphId, surfaceForm, offset, length);
   }

-  /** @return part of speech. */
+  /**
+   * @return part of speech.
+   */
   public String getPartOfSpeech() {
     return morphData.getPartOfSpeech(morphId);
   }

-  /** @return inflection type or null */
+  /**
+   * @return inflection type or null
+   */
   public String getInflectionType() {
     return morphData.getInflectionType(morphId);
   }

-  /** @return inflection form or null */
+  /**
+   * @return inflection form or null
+   */
   public String getInflectionForm() {
     return morphData.getInflectionForm(morphId);
   }

-  /** @return base form or null if token is not inflected */
+  /**
+   * @return base form or null if token is not inflected
+   */
   public String getBaseForm() {
     return morphData.getBaseForm(morphId, surfaceForm, offset, length);
   }

@@ -34,7 +34,9 @@ public final class TokenInfoFST extends org.apache.lucene.analysis.morph.TokenIn
     super(fst, fasterButMoreRam ? 0x9FFF : 0x30FF, 0x3040);
   }

-  /** @lucene.internal for testing only */
+  /**
+   * @lucene.internal for testing only
+   */
   FST<Long> getInternalFST() {
     return fst;
   }

@@ -26,7 +26,9 @@ public final class TokenInfoFST extends org.apache.lucene.analysis.morph.TokenIn
     super(fst, 0xD7A3, 0xAC00);
   }

-  /** @lucene.internal for testing only */
+  /**
+   * @lucene.internal for testing only
+   */
   FST<Long> getInternalFST() {
     return fst;
   }

@@ -35,7 +35,9 @@ class PathNode implements Comparable<PathNode> {
     else return 1;
   }

-  /** @see java.lang.Object#hashCode() */
+  /**
+   * @see java.lang.Object#hashCode()
+   */
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -47,7 +49,9 @@ class PathNode implements Comparable<PathNode> {
     return result;
   }

-  /** @see java.lang.Object#equals(java.lang.Object) */
+  /**
+   * @see java.lang.Object#equals(java.lang.Object)
+   */
   @Override
   public boolean equals(Object obj) {
     if (this == obj) return true;

@@ -60,7 +60,9 @@ public class SegToken {
     this.weight = weight;
   }

-  /** @see java.lang.Object#hashCode() */
+  /**
+   * @see java.lang.Object#hashCode()
+   */
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -76,7 +78,9 @@ public class SegToken {
     return result;
   }

-  /** @see java.lang.Object#equals(java.lang.Object) */
+  /**
+   * @see java.lang.Object#equals(java.lang.Object)
+   */
   @Override
   public boolean equals(Object obj) {
     if (this == obj) return true;

@@ -42,7 +42,9 @@ class SegTokenPair {
     this.weight = weight;
   }

-  /** @see java.lang.Object#hashCode() */
+  /**
+   * @see java.lang.Object#hashCode()
+   */
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -58,7 +60,9 @@ class SegTokenPair {
     return result;
   }

-  /** @see java.lang.Object#equals(java.lang.Object) */
+  /**
+   * @see java.lang.Object#equals(java.lang.Object)
+   */
   @Override
   public boolean equals(Object obj) {
     if (this == obj) return true;

@@ -262,7 +262,9 @@ public final class Lucene50CompressingStoredFieldsReader extends StoredFieldsRea
     }
   }

-  /** @throws AlreadyClosedException if this FieldsReader is closed */
+  /**
+   * @throws AlreadyClosedException if this FieldsReader is closed
+   */
   private void ensureOpen() throws AlreadyClosedException {
     if (closed) {
       throw new AlreadyClosedException("this FieldsReader is closed");

@@ -254,7 +254,9 @@ public final class Lucene50CompressingTermVectorsReader extends TermVectorsReade
     }
   }

-  /** @throws AlreadyClosedException if this TermVectorsReader is closed */
+  /**
+   * @throws AlreadyClosedException if this TermVectorsReader is closed
+   */
   private void ensureOpen() throws AlreadyClosedException {
     if (closed) {
       throw new AlreadyClosedException("this FieldsReader is closed");

@@ -132,12 +132,16 @@ public class Benchmark {
     System.out.println("####################");
   }

-  /** @return Returns the algorithm. */
+  /**
+   * @return Returns the algorithm.
+   */
   public Algorithm getAlgorithm() {
     return algorithm;
   }

-  /** @return Returns the runData. */
+  /**
+   * @return Returns the runData.
+   */
   public PerfRunData getRunData() {
     return runData;
   }

@@ -258,27 +258,37 @@ public class PerfRunData implements Closeable {
     return startTimeMillis;
   }

-  /** @return Start time in milliseconds */
+  /**
+   * @return Start time in milliseconds
+   */
   public long getStartTimeMillis() {
     return startTimeMillis;
   }

-  /** @return Returns the points. */
+  /**
+   * @return Returns the points.
+   */
   public Points getPoints() {
     return points;
   }

-  /** @return Returns the directory. */
+  /**
+   * @return Returns the directory.
+   */
   public Directory getDirectory() {
     return directory;
   }

-  /** @param directory The directory to set. */
+  /**
+   * @param directory The directory to set.
+   */
   public void setDirectory(Directory directory) {
     this.directory = directory;
   }

-  /** @return Returns the taxonomy directory */
+  /**
+   * @return Returns the taxonomy directory
+   */
   public Directory getTaxonomyDir() {
     return taxonomyDir;
   }
@@ -315,7 +325,9 @@ public class PerfRunData implements Closeable {
     return taxonomyReader;
   }

-  /** @param taxoWriter The taxonomy writer to set. */
+  /**
+   * @param taxoWriter The taxonomy writer to set.
+   */
   public void setTaxonomyWriter(TaxonomyWriter taxoWriter) {
     this.taxonomyWriter = taxoWriter;
   }
@@ -376,17 +388,23 @@ public class PerfRunData implements Closeable {
     }
   }

-  /** @return Returns the indexWriter. */
+  /**
+   * @return Returns the indexWriter.
+   */
   public IndexWriter getIndexWriter() {
     return indexWriter;
   }

-  /** @param indexWriter The indexWriter to set. */
+  /**
+   * @param indexWriter The indexWriter to set.
+   */
   public void setIndexWriter(IndexWriter indexWriter) {
     this.indexWriter = indexWriter;
   }

-  /** @return Returns the analyzer. */
+  /**
+   * @return Returns the analyzer.
+   */
   public Analyzer getAnalyzer() {
     return analyzer;
   }
@@ -410,17 +428,23 @@ public class PerfRunData implements Closeable {
     return facetSource;
   }

-  /** @return the locale */
+  /**
+   * @return the locale
+   */
   public Locale getLocale() {
     return locale;
   }

-  /** @param locale the locale to set */
+  /**
+   * @param locale the locale to set
+   */
   public void setLocale(Locale locale) {
     this.locale = locale;
   }

-  /** @return Returns the config. */
+  /**
+   * @return Returns the config.
+   */
   public Config getConfig() {
     return config;
   }
@@ -440,7 +464,9 @@ public class PerfRunData implements Closeable {
     }
   }

-  /** @return Returns the queryMaker by read task type (class) */
+  /**
+   * @return Returns the queryMaker by read task type (class)
+   */
   public synchronized QueryMaker getQueryMaker(ReadTask readTask) {
     // mapping the query maker by task class allows extending/adding new search/read tasks
     // without needing to modify this class.

@@ -103,7 +103,9 @@ public class TaskStats implements Cloneable {
     return countsByTimeStepMSec;
   }

-  /** @return the taskRunNum. */
+  /**
+   * @return the taskRunNum.
+   */
   public int getTaskRunNum() {
     return taskRunNum;
   }
@@ -121,37 +123,51 @@ public class TaskStats implements Cloneable {
     return res.toString();
   }

-  /** @return Returns the count. */
+  /**
+   * @return Returns the count.
+   */
   public int getCount() {
     return count;
   }

-  /** @return elapsed time. */
+  /**
+   * @return elapsed time.
+   */
   public long getElapsed() {
     return elapsed;
   }

-  /** @return Returns the maxTotMem. */
+  /**
+   * @return Returns the maxTotMem.
+   */
   public long getMaxTotMem() {
     return maxTotMem;
   }

-  /** @return Returns the maxUsedMem. */
+  /**
+   * @return Returns the maxUsedMem.
+   */
   public long getMaxUsedMem() {
     return maxUsedMem;
   }

-  /** @return Returns the numParallelTasks. */
+  /**
+   * @return Returns the numParallelTasks.
+   */
   public int getNumParallelTasks() {
     return numParallelTasks;
   }

-  /** @return Returns the task. */
+  /**
+   * @return Returns the task.
+   */
   public PerfTask getTask() {
     return task;
   }

-  /** @return Returns the numRuns. */
+  /**
+   * @return Returns the numRuns.
+   */
   public int getNumRuns() {
     return numRuns;
   }
@@ -196,7 +212,9 @@ public class TaskStats implements Cloneable {
     return c;
   }

-  /** @return the round number. */
+  /**
+   * @return the round number.
+   */
   public int getRound() {
     return round;
   }

@@ -156,7 +156,9 @@ public abstract class PerfTask implements Cloneable {
    */
  public abstract int doLogic() throws Exception;

-  /** @return Returns the name. */
+  /**
+   * @return Returns the name.
+   */
   public String getName() {
     if (params == null) {
       return name;
@@ -164,22 +166,30 @@ public abstract class PerfTask implements Cloneable {
     return new StringBuilder(name).append('(').append(params).append(')').toString();
   }

-  /** @param name The name to set. */
+  /**
+   * @param name The name to set.
+   */
   protected void setName(String name) {
     this.name = name;
   }

-  /** @return Returns the run data. */
+  /**
+   * @return Returns the run data.
+   */
   public PerfRunData getRunData() {
     return runData;
   }

-  /** @return Returns the depth. */
+  /**
+   * @return Returns the depth.
+   */
   public int getDepth() {
     return depth;
   }

-  /** @param depth The depth to set. */
+  /**
+   * @param depth The depth to set.
+   */
   public void setDepth(int depth) {
     this.depth = depth;
   }
@@ -209,7 +219,9 @@ public abstract class PerfTask implements Cloneable {
     return sb.toString();
   }

-  /** @return Returns the maxDepthLogStart. */
+  /**
+   * @return Returns the maxDepthLogStart.
+   */
   int getMaxDepthLogStart() {
     return maxDepthLogStart;
   }
@@ -285,7 +297,9 @@ public abstract class PerfTask implements Cloneable {
     this.params = params;
   }

-  /** @return Returns the Params. */
+  /**
+   * @return Returns the Params.
+   */
   public String getParams() {
     return params;
   }

@@ -83,12 +83,16 @@ public class TaskSequence extends PerfTask {
     }
   }

-  /** @return Returns the parallel. */
+  /**
+   * @return Returns the parallel.
+   */
   public boolean isParallel() {
     return parallel;
   }

-  /** @return Returns the repetitions. */
+  /**
+   * @return Returns the repetitions.
+   */
   public int getRepetitions() {
     return repetitions;
   }
@@ -100,7 +104,9 @@ public class TaskSequence extends PerfTask {
     fixedTime = true;
   }

-  /** @param repetitions The repetitions to set. */
+  /**
+   * @param repetitions The repetitions to set.
+   */
   public void setRepetitions(int repetitions) throws Exception {
     fixedTime = false;
     this.repetitions = repetitions;
@@ -112,7 +118,9 @@ public class TaskSequence extends PerfTask {
     setSequenceName();
   }

-  /** @return Returns the parent. */
+  /**
+   * @return Returns the parent.
+   */
   public TaskSequence getParent() {
     return parent;
   }
@@ -484,7 +492,9 @@ public class TaskSequence extends PerfTask {
     return (perMin ? rate : 60 * rate);
   }

-  /** @param rate The rate to set. */
+  /**
+   * @param rate The rate to set.
+   */
   public void setRate(int rate, boolean perMin) {
     this.rate = rate;
     this.perMin = perMin;
@@ -511,7 +521,9 @@ public class TaskSequence extends PerfTask {
     return seqName; // override to include more info
   }

-  /** @return Returns the tasks. */
+  /**
+   * @return Returns the tasks.
+   */
   public ArrayList<PerfTask> getTasks() {
     return tasks;
   }

@@ -389,7 +389,9 @@ public class Config {
     return res;
   }

-  /** @return names of params set by round, for reports title */
+  /**
+   * @return names of params set by round, for reports title
+   */
   public String getColsNamesForValsByRound() {
     if (colForValByRound.size() == 0) {
       return "";
@@ -401,7 +403,9 @@ public class Config {
     return sb.toString();
   }

-  /** @return values of params set by round, for reports lines. */
+  /**
+   * @return values of params set by round, for reports lines.
+   */
   public String getColsValuesForValsByRound(int roundNum) {
     if (colForValByRound.size() == 0) {
       return "";
@@ -440,12 +444,16 @@ public class Config {
     return sb.toString();
   }

-  /** @return the round number. */
+  /**
+   * @return the round number.
+   */
   public int getRoundNumber() {
     return roundNumber;
   }

-  /** @return Returns the algorithmText. */
+  /**
+   * @return Returns the algorithmText.
+   */
   public String getAlgorithmText() {
     return algorithmText;
   }

@@ -142,7 +142,9 @@ public class QualityBenchmark {
     return stts;
   }

-  /** @return the maximum number of quality queries to run. Useful at debugging. */
+  /**
+   * @return the maximum number of quality queries to run. Useful at debugging.
+   */
   public int getMaxQueries() {
     return maxQueries;
   }
@@ -152,7 +154,9 @@ public class QualityBenchmark {
     this.maxQueries = maxQueries;
   }

-  /** @return the maximum number of results to collect for each quality query. */
+  /**
+   * @return the maximum number of results to collect for each quality query.
+   */
   public int getMaxResults() {
     return maxResults;
   }

@@ -99,17 +99,23 @@ public class BlockHeader implements Accountable {
     return this;
   }

-  /** @return The number of lines in the block. */
+  /**
+   * @return The number of lines in the block.
+   */
   public int getLinesCount() {
     return linesCount;
   }

-  /** @return The index of the middle line of the block. */
+  /**
+   * @return The index of the middle line of the block.
+   */
   public int getMiddleLineIndex() {
     return middleLineIndex;
   }

-  /** @return The offset to the middle line of the block, relative to the block start. */
+  /**
+   * @return The offset to the middle line of the block, relative to the block start.
+   */
   public int getMiddleLineOffset() {
     return middleLineOffset;
   }
@@ -122,17 +128,23 @@ public class BlockHeader implements Accountable {
     return termStatesBaseOffset;
   }

-  /** @return The file pointer to the docs of the first term with docs in the block. */
+  /**
+   * @return The file pointer to the docs of the first term with docs in the block.
+   */
   public long getBaseDocsFP() {
     return baseDocsFP;
   }

-  /** @return The file pointer to the positions of the first term with positions in the block. */
+  /**
+   * @return The file pointer to the positions of the first term with positions in the block.
+   */
   public long getBasePositionsFP() {
     return basePositionsFP;
   }

-  /** @return The file pointer to the payloads of the first term with payloads in the block. */
+  /**
+   * @return The file pointer to the payloads of the first term with payloads in the block.
+   */
   public long getBasePayloadsFP() {
     return basePayloadsFP;
   }

@@ -222,7 +222,9 @@ public class DeltaBaseTermStateSerializer implements Accountable {
     return RAM_USAGE;
   }

-  /** @return The estimated RAM usage of the given {@link TermState}. */
+  /**
+   * @return The estimated RAM usage of the given {@link TermState}.
+   */
   public static long ramBytesUsed(TermState termState) {
     return termState instanceof IntBlockTermState
         ? INT_BLOCK_TERM_STATE_RAM_USAGE

@@ -139,7 +139,9 @@ public class FieldMetadata {
     return isMutable ? docsSeen.cardinality() : docCount;
   }

-  /** @return The file pointer to the start of the first block of the field. */
+  /**
+   * @return The file pointer to the start of the first block of the field.
+   */
   public long getFirstBlockStartFP() {
     return firstBlockStartFP;
   }
@@ -150,7 +152,9 @@ public class FieldMetadata {
     this.firstBlockStartFP = firstBlockStartFP;
   }

-  /** @return The start file pointer for the last block of the field. */
+  /**
+   * @return The start file pointer for the last block of the field.
+   */
   public long getLastBlockStartFP() {
     return lastBlockStartFP;
   }
@@ -161,7 +165,9 @@ public class FieldMetadata {
     this.lastBlockStartFP = lastBlockStartFP;
   }

-  /** @return The file pointer to the start of the dictionary of the field. */
+  /**
+   * @return The file pointer to the start of the dictionary of the field.
+   */
   public long getDictionaryStartFP() {
     return dictionaryStartFP;
   }

@@ -403,7 +403,9 @@ public class IntersectBlockReader extends BlockReader {
     return linear && term.compareTo(linearUpperBound) < 0;
   }

-  /** @see org.apache.lucene.index.FilteredTermsEnum#nextSeekTerm(BytesRef) */
+  /**
+   * @see org.apache.lucene.index.FilteredTermsEnum#nextSeekTerm(BytesRef)
+   */
   protected BytesRef nextSeekTerm(final BytesRef term) {
     // System.out.println("ATE.nextSeekTerm term=" + term);
     if (term == null) {

@@ -79,7 +79,9 @@ public class TermBytes implements Accountable {
     return mdpLength;
   }

-  /** @return This term bytes. */
+  /**
+   * @return This term bytes.
+   */
   public BytesRef getTerm() {
     return term;
   }

@@ -90,7 +90,9 @@ public class UniformSplitPostingsFormat extends PostingsFormat {
     this(NAME, targetNumBlockLines, deltaNumLines, blockEncoder, blockDecoder, dictionaryOnHeap);
   }

-  /** @see #UniformSplitPostingsFormat(int, int, BlockEncoder, BlockDecoder, boolean) */
+  /**
+   * @see #UniformSplitPostingsFormat(int, int, BlockEncoder, BlockDecoder, boolean)
+   */
   protected UniformSplitPostingsFormat(
       String name,
       int targetNumBlockLines,

@@ -90,7 +90,9 @@ public class UniformSplitTermsReader extends FieldsProducer {
         TERMS_DICTIONARY_EXTENSION);
   }

-  /** @see #UniformSplitTermsReader(PostingsReaderBase, SegmentReadState, BlockDecoder, boolean) */
+  /**
+   * @see #UniformSplitTermsReader(PostingsReaderBase, SegmentReadState, BlockDecoder, boolean)
+   */
   protected UniformSplitTermsReader(
       PostingsReaderBase postingsReader,
       SegmentReadState state,

@@ -305,7 +305,9 @@ public class UniformSplitTermsWriter extends FieldsConsumer {
     encodedBytes.writeTo(blockOutput);
   }

-  /** @return 1 if the field was written; 0 otherwise. */
+  /**
+   * @return 1 if the field was written; 0 otherwise.
+   */
   protected int writeFieldTerms(
       BlockWriter blockWriter,
       DataOutput fieldsOutput,

@@ -103,7 +103,9 @@ public abstract class Tokenizer extends TokenStream {
     inputPending = ILLEGAL_STATE_READER;
   }

-  /** @lucene.internal */
+  /**
+   * @lucene.internal
+   */
   protected void setReaderTestPoint() {}

   private static final Reader ILLEGAL_STATE_READER =

@@ -217,7 +217,9 @@ public final class Lucene90CompressingStoredFieldsReader extends StoredFieldsRea
     }
   }

-  /** @throws AlreadyClosedException if this FieldsReader is closed */
+  /**
+   * @throws AlreadyClosedException if this FieldsReader is closed
+   */
   private void ensureOpen() throws AlreadyClosedException {
     if (closed) {
       throw new AlreadyClosedException("this FieldsReader is closed");

@@ -284,7 +284,9 @@ public final class Lucene90CompressingTermVectorsReader extends TermVectorsReade
     return numDocs;
   }

-  /** @throws AlreadyClosedException if this TermVectorsReader is closed */
+  /**
+   * @throws AlreadyClosedException if this TermVectorsReader is closed
+   */
   private void ensureOpen() throws AlreadyClosedException {
     if (closed) {
       throw new AlreadyClosedException("this FieldsReader is closed");

@@ -27,7 +27,9 @@ public class ExpandingVectorValues extends FilterVectorValues {

   private final float[] value;

-  /** @param in the wrapped values */
+  /**
+   * @param in the wrapped values
+   */
   protected ExpandingVectorValues(VectorValues in) {
     super(in);
     value = new float[in.dimension()];

@@ -24,7 +24,9 @@ import java.util.concurrent.locks.ReentrantLock;
 import org.apache.lucene.index.DocumentsWriterPerThread.FlushedSegment;
 import org.apache.lucene.util.IOConsumer;

-/** @lucene.internal */
+/**
+ * @lucene.internal
+ */
 final class DocumentsWriterFlushQueue {
   private final Queue<FlushTicket> queue = new ArrayDeque<>();
   // we track tickets separately since count must be present even before the ticket is

@@ -343,7 +343,9 @@ public class LiveIndexWriterConfig {
     return perThreadHardLimitMB;
   }

-  /** @see IndexWriterConfig#setFlushPolicy(FlushPolicy) */
+  /**
+   * @see IndexWriterConfig#setFlushPolicy(FlushPolicy)
+   */
   FlushPolicy getFlushPolicy() {
     return flushPolicy;
   }

@@ -68,18 +68,24 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
     this.disjuncts.addAll(disjuncts);
   }

-  /** @return An {@code Iterator<Query>} over the disjuncts */
+  /**
+   * @return An {@code Iterator<Query>} over the disjuncts
+   */
   @Override
   public Iterator<Query> iterator() {
     return getDisjuncts().iterator();
   }

-  /** @return the disjuncts. */
+  /**
+   * @return the disjuncts.
+   */
   public Collection<Query> getDisjuncts() {
     return Collections.unmodifiableCollection(disjuncts);
   }

-  /** @return tie breaker value for multiple matches. */
+  /**
+   * @return tie breaker value for multiple matches.
+   */
   public float getTieBreakerMultiplier() {
     return tieBreakerMultiplier;
   }

@@ -144,7 +144,9 @@ public class FuzzyQuery extends MultiTermQuery {
     this(term, defaultMaxEdits);
   }

-  /** @return the maximum number of edit distances allowed for this query to match. */
+  /**
+   * @return the maximum number of edit distances allowed for this query to match.
+   */
   public int getMaxEdits() {
     return maxEdits;
   }

@@ -283,7 +283,9 @@ public abstract class MultiTermQuery extends Query {
     return rewriteMethod.rewrite(reader, this);
   }

-  /** @return the rewrite method used to build the final query */
+  /**
+   * @return the rewrite method used to build the final query
+   */
   public RewriteMethod getRewriteMethod() {
     return rewriteMethod;
   }

@@ -36,6 +36,8 @@ import org.apache.lucene.index.LeafReaderContext;
  */
 public interface SegmentCacheable {

-  /** @return {@code true} if the object can be cached against a given leaf */
+  /**
+   * @return {@code true} if the object can be cached against a given leaf
+   */
   boolean isCacheable(LeafReaderContext ctx);
 }

@@ -344,7 +344,9 @@ public final class ByteBuffersDataOutput extends DataOutput implements Accountab
     }
   }

-  /** @return The number of bytes written to this output so far. */
+  /**
+   * @return The number of bytes written to this output so far.
+   */
   public long size() {
     long size = 0;
     int blockCount = blocks.size();
@@ -477,7 +479,9 @@ public final class ByteBuffersDataOutput extends DataOutput implements Accountab
     currentBlock = EMPTY;
   }

-  /** @return Returns a new {@link ByteBuffersDataOutput} with the {@link #reset()} capability. */
+  /**
+   * @return Returns a new {@link ByteBuffersDataOutput} with the {@link #reset()} capability.
+   */
   // TODO: perhaps we can move it out to an utility class (as a supplier of preconfigured
   // instances?)
   public static ByteBuffersDataOutput newResettableInstance() {

@@ -286,7 +286,9 @@ public abstract class FSDirectory extends BaseDirectory {
     deletePendingFiles();
   }

-  /** @return the underlying filesystem directory */
+  /**
+   * @return the underlying filesystem directory
+   */
   public Path getDirectory() {
     ensureOpen();
     return directory;

@@ -147,18 +147,24 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence, Clone
     return new CharsRef(chars, offset + start, end - start);
   }

-  /** @deprecated This comparator is only a transition mechanism */
+  /**
+   * @deprecated This comparator is only a transition mechanism
+   */
   @Deprecated
   private static final Comparator<CharsRef> utf16SortedAsUTF8SortOrder =
       new UTF16SortedAsUTF8Comparator();

-  /** @deprecated This comparator is only a transition mechanism */
+  /**
+   * @deprecated This comparator is only a transition mechanism
+   */
   @Deprecated
   public static Comparator<CharsRef> getUTF16SortedAsUTF8Comparator() {
     return utf16SortedAsUTF8SortOrder;
   }

-  /** @deprecated This comparator is only a transition mechanism */
+  /**
+   * @deprecated This comparator is only a transition mechanism
+   */
   @Deprecated
   private static class UTF16SortedAsUTF8Comparator implements Comparator<CharsRef> {
     // Only singleton

@@ -31,12 +31,16 @@ public class MapOfSets<K, V> {

   private final Map<K, Set<V>> theMap;

-  /** @param m the backing store for this object */
+  /**
+   * @param m the backing store for this object
+   */
   public MapOfSets(Map<K, Set<V>> m) {
     theMap = m;
   }

-  /** @return direct access to the map backing this object. */
+  /**
+   * @return direct access to the map backing this object.
+   */
   public Map<K, Set<V>> getMap() {
     return theMap;
   }

@@ -99,17 +99,23 @@ public final class RecyclingByteBlockAllocator extends ByteBlockPool.Allocator {
     assert bytesUsed.get() >= 0;
   }

-  /** @return the number of currently buffered blocks */
+  /**
+   * @return the number of currently buffered blocks
+   */
   public int numBufferedBlocks() {
     return freeBlocks;
   }

-  /** @return the number of bytes currently allocated by this {@link Allocator} */
+  /**
+   * @return the number of bytes currently allocated by this {@link Allocator}
+   */
   public long bytesUsed() {
     return bytesUsed.get();
   }

-  /** @return the maximum number of buffered byte blocks */
+  /**
+   * @return the maximum number of buffered byte blocks
+   */
   public int maxBufferedBlocks() {
     return maxBufferedBlocks;
   }

@@ -99,17 +99,23 @@ public final class RecyclingIntBlockAllocator extends Allocator {
     assert bytesUsed.get() >= 0;
   }

-  /** @return the number of currently buffered blocks */
+  /**
+   * @return the number of currently buffered blocks
+   */
   public int numBufferedBlocks() {
     return freeBlocks;
   }

-  /** @return the number of bytes currently allocated by this {@link Allocator} */
+  /**
+   * @return the number of bytes currently allocated by this {@link Allocator}
+   */
   public long bytesUsed() {
     return bytesUsed.get();
   }

-  /** @return the maximum number of buffered byte blocks */
+  /**
+   * @return the maximum number of buffered byte blocks
+   */
   public int maxBufferedBlocks() {
     return maxBufferedBlocks;
   }

@@ -95,7 +95,9 @@ public final class Version {
    */
   public static final int MIN_SUPPORTED_MAJOR = Version.LATEST.major - 1;

-  /** @see #getPackageImplementationVersion() */
+  /**
+   * @see #getPackageImplementationVersion()
+   */
   private static String implementationVersion;

   /**

@@ -94,13 +94,19 @@ public final class FST<T> implements Accountable {
    */
   static final byte ARCS_FOR_DIRECT_ADDRESSING = 1 << 6;

-  /** @see #shouldExpandNodeWithFixedLengthArcs */
+  /**
+   * @see #shouldExpandNodeWithFixedLengthArcs
+   */
   static final int FIXED_LENGTH_ARC_SHALLOW_DEPTH = 3; // 0 => only root node.

-  /** @see #shouldExpandNodeWithFixedLengthArcs */
+  /**
+   * @see #shouldExpandNodeWithFixedLengthArcs
+   */
   static final int FIXED_LENGTH_ARC_SHALLOW_NUM_ARCS = 5;

-  /** @see #shouldExpandNodeWithFixedLengthArcs */
+  /**
+   * @see #shouldExpandNodeWithFixedLengthArcs
+   */
   static final int FIXED_LENGTH_ARC_DEEP_NUM_ARCS = 10;

   /**

@@ -61,7 +61,9 @@ public class NeighborQueue {
     this.order = maxHeap ? Order.MAX_HEAP : Order.MIN_HEAP;
   }

-  /** @return the number of elements in the heap */
+  /**
+   * @return the number of elements in the heap
+   */
   public int size() {
     return heap.size();
   }

@@ -563,7 +563,9 @@ public class IntIntHashMap implements Iterable<IntIntHashMap.IntIntCursor>, Clon
     }
   }

-  /** @return Returns a container with all values stored in this map. */
+  /**
+   * @return Returns a container with all values stored in this map.
+   */
   public IntContainer values() {
     return new ValuesContainer();
   }

@@ -393,7 +393,9 @@ public class PackedInts {
       return gets;
     }

-    /** @return the number of values. */
+    /**
+     * @return the number of values.
+     */
     public abstract int size();
   }


@@ -41,7 +41,9 @@ public class PackedLongValues extends LongValues implements Accountable {
     return new PackedLongValues.Builder(pageSize, acceptableOverheadRatio);
   }

-  /** @see #packedBuilder(int, float) */
+  /**
+   * @see #packedBuilder(int, float)
+   */
   public static PackedLongValues.Builder packedBuilder(float acceptableOverheadRatio) {
     return packedBuilder(DEFAULT_PAGE_SIZE, acceptableOverheadRatio);
   }
@@ -55,7 +57,9 @@ public class PackedLongValues extends LongValues implements Accountable {
     return new DeltaPackedLongValues.Builder(pageSize, acceptableOverheadRatio);
   }

-  /** @see #deltaPackedBuilder(int, float) */
+  /**
+   * @see #deltaPackedBuilder(int, float)
+   */
   public static PackedLongValues.Builder deltaPackedBuilder(float acceptableOverheadRatio) {
     return deltaPackedBuilder(DEFAULT_PAGE_SIZE, acceptableOverheadRatio);
   }
@@ -69,7 +73,9 @@ public class PackedLongValues extends LongValues implements Accountable {
     return new MonotonicLongValues.Builder(pageSize, acceptableOverheadRatio);
   }

-  /** @see #monotonicBuilder(int, float) */
+  /**
+   * @see #monotonicBuilder(int, float)
+   */
   public static PackedLongValues.Builder monotonicBuilder(float acceptableOverheadRatio) {
     return monotonicBuilder(DEFAULT_PAGE_SIZE, acceptableOverheadRatio);
   }

@@ -29,7 +29,9 @@ import org.apache.lucene.tests.util.LuceneTestCase;
 import org.apache.lucene.tests.util.TestUtil;
 import org.apache.lucene.util.BytesRef;

-/** @lucene.experimental */
+/**
+ * @lucene.experimental
+ */
 public class TestOmitPositions extends LuceneTestCase {

   public void testBasic() throws Exception {

@@ -1,7 +1,7 @@
 {
   "lucene/expressions/src/java/org/apache/lucene/expressions/js/Javascript.g4": "818e89aae0b6c7601051802013898c128fe7c1ba",
   "lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptBaseVisitor.java": "45e3c7093f3e485a07be507efbdefc5e3d112576",
-  "lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptLexer.java": "f0a53549fb8329dd4181f836d1778417de08550e",
-  "lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptParser.java": "ca9d587e6e98436647370ee5b7c82ae07243ee50",
+  "lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptLexer.java": "354e2d7a982fec18a06e552438c2b2e2c13137cf",
+  "lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptParser.java": "6182b6a2e3ade663e6f7a8643ace313e66cbf12a",
   "lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptVisitor.java": "ebf033dc72e63203e5d4d85fd57114dd973482dc"
 }

@@ -127,7 +127,9 @@ class JavascriptLexer extends Lexer {
   };
   public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);

-  /** @deprecated Use {@link #VOCABULARY} instead. */
+  /**
+   * @deprecated Use {@link #VOCABULARY} instead.
+   */
   @Deprecated public static final String[] tokenNames;

   static {

@@ -90,7 +90,9 @@ class JavascriptParser extends Parser {
   };
   public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);

-  /** @deprecated Use {@link #VOCABULARY} instead. */
+  /**
+   * @deprecated Use {@link #VOCABULARY} instead.
+   */
   @Deprecated public static final String[] tokenNames;

   static {

@@ -16,7 +16,9 @@
  */
 package org.apache.lucene.facet.taxonomy.directory;

-/** @lucene.experimental */
+/**
+ * @lucene.experimental
+ */
 abstract class Consts {
   static final String FULL = "$full_path$";
   static final String FIELD_PARENT_ORDINAL_NDV = "$parent_ndv$";

@@ -109,7 +109,9 @@ public abstract class AllGroupHeadsCollector<T> extends SimpleCollector {
     return docHeads;
   }

-  /** @return the number of group heads found for a query. */
+  /**
+   * @return the number of group heads found for a query.
+   */
   public int groupHeadsSize() {
     return getCollectedGroupHeads().size();
   }

@@ -52,7 +52,9 @@ public class FirstPassGroupingCollector<T> extends SimpleCollector {
   private final int compIDXEnd;

   // Set once we reach topNGroups unique groups:
-  /** @lucene.internal */
+  /**
+   * @lucene.internal
+   */
   protected TreeSet<CollectedSearchGroup<T>> orderedGroups;

   private int docBase;
@@ -353,7 +355,9 @@ public class FirstPassGroupingCollector<T> extends SimpleCollector {
     groupSelector.setNextReader(readerContext);
   }

-  /** @return the GroupSelector used for this Collector */
+  /**
+   * @return the GroupSelector used for this Collector
+   */
   public GroupSelector<T> getGroupSelector() {
     return groupSelector;
   }

@@ -263,12 +263,16 @@ public abstract class GroupFacetCollector extends SimpleCollector {
       return "FacetEntry{" + "value=" + value.utf8ToString() + ", count=" + count + '}';
     }

-    /** @return The value of this facet entry */
+    /**
+     * @return The value of this facet entry
+     */
     public BytesRef getValue() {
       return value;
     }

-    /** @return The count (number of groups) of this facet entry. */
+    /**
+     * @return The count (number of groups) of this facet entry.
+     */
     public int getCount() {
       return count;
     }

@@ -55,7 +55,9 @@ public abstract class GroupSelector<T> {
    */
   public abstract T currentValue() throws IOException;

-  /** @return a copy of the group value of the current document */
+  /**
+   * @return a copy of the group value of the current document
+   */
   public abstract T copyValue() throws IOException;

   /**

@@ -66,7 +66,9 @@ public class SecondPassGroupingCollector<T> extends SimpleCollector {
     reducer.setGroups(groups);
   }

-  /** @return the GroupSelector used in this collector */
+  /**
+   * @return the GroupSelector used in this collector
+   */
   public GroupSelector<T> getGroupSelector() {
     return groupSelector;
   }

@@ -18,6 +18,8 @@ package org.apache.lucene.search.highlight;

 /** Encodes original text. The Encoder works with the {@link Formatter} to generate output. */
 public interface Encoder {
-  /** @param originalText The section of text being output */
+  /**
+   * @param originalText The section of text being output
+   */
   String encodeText(String originalText);
 }

@@ -423,7 +423,9 @@ public class Highlighter {
     textFragmenter = Objects.requireNonNull(fragmenter);
   }

-  /** @return Object used to score each text fragment */
+  /**
+   * @return Object used to score each text fragment
+   */
   public Scorer getFragmentScorer() {
     return fragmentScorer;
   }

@@ -53,7 +53,9 @@ public class QueryScorer implements Scorer {
   private int maxCharsToAnalyze;
   private boolean usePayloads = false;

-  /** @param query Query to use for highlighting */
+  /**
+   * @param query Query to use for highlighting
+   */
   public QueryScorer(Query query) {
     init(query, null, null, true);
   }
@@ -85,13 +87,17 @@ public class QueryScorer implements Scorer {
     init(query, field, reader, true);
   }

-  /** @param defaultField - The default field for queries with the field name unspecified */
+  /**
+   * @param defaultField - The default field for queries with the field name unspecified
+   */
   public QueryScorer(Query query, String field, String defaultField) {
     this.defaultField = defaultField;
     init(query, field, null, true);
   }

-  /** @param weightedTerms an array of pre-created {@link WeightedSpanTerm}s */
+  /**
+   * @param weightedTerms an array of pre-created {@link WeightedSpanTerm}s
+   */
   public QueryScorer(WeightedSpanTerm[] weightedTerms) {
     this.fieldWeightedSpanTerms = new HashMap<>(weightedTerms.length);

@@ -228,7 +234,9 @@ public class QueryScorer implements Scorer {
     totalScore = 0;
   }

-  /** @return true if multi-term queries should be expanded */
+  /**
+   * @return true if multi-term queries should be expanded
+   */
   public boolean isExpandMultiTermQuery() {
     return expandMultiTermQuery;
   }

@@ -33,7 +33,9 @@ public class SimpleFragmenter implements Fragmenter {
     this(DEFAULT_FRAGMENT_SIZE);
   }

-  /** @param fragmentSize size in number of characters of each fragment */
+  /**
+   * @param fragmentSize size in number of characters of each fragment
+   */
   public SimpleFragmenter(int fragmentSize) {
     this.fragmentSize = fragmentSize;
   }
@@ -59,12 +61,16 @@ public class SimpleFragmenter implements Fragmenter {
     return isNewFrag;
   }

-  /** @return size in number of characters of each fragment */
+  /**
+   * @return size in number of characters of each fragment
+   */
   public int getFragmentSize() {
     return fragmentSize;
   }

-  /** @param size size in characters of each fragment */
+  /**
+   * @param size size in characters of each fragment
+   */
   public void setFragmentSize(int size) {
     fragmentSize = size;
   }

@@ -39,7 +39,9 @@ public class SimpleSpanFragmenter implements Fragmenter {
   private PositionIncrementAttribute posIncAtt;
   private OffsetAttribute offsetAtt;

-  /** @param queryScorer QueryScorer that was used to score hits */
+  /**
+   * @param queryScorer QueryScorer that was used to score hits
+   */
   public SimpleSpanFragmenter(QueryScorer queryScorer) {
     this(queryScorer, DEFAULT_FRAGMENT_SIZE);
   }

@@ -36,17 +36,23 @@ public class TextFragment {
   public float getScore() {
     return score;
   }
-  /** @param frag2 Fragment to be merged into this one */
+  /**
+   * @param frag2 Fragment to be merged into this one
+   */
   public void merge(TextFragment frag2) {
     textEndPos = frag2.textEndPos;
     score = Math.max(score, frag2.score);
   }
-  /** @return true if this fragment follows the one passed */
+  /**
+   * @return true if this fragment follows the one passed
+   */
   public boolean follows(TextFragment fragment) {
     return textStartPos == fragment.textEndPos;
   }

-  /** @return the fragment sequence number */
+  /**
+   * @return the fragment sequence number
+   */
   public int getFragNum() {
     return fragNum;
   }

@@ -103,12 +103,16 @@ public class TokenGroup {
     return matchEndOffset;
   }

-  /** @return the number of tokens in this group */
+  /**
+   * @return the number of tokens in this group
+   */
   public int getNumTokens() {
     return numTokens;
   }

-  /** @return all tokens' scores summed up */
+  /**
+   * @return all tokens' scores summed up
+   */
   public float getTotalScore() {
     return tot;
   }

@@ -25,22 +25,30 @@ public class WeightedTerm {
     this.term = term;
   }

-  /** @return the term value (stemmed) */
+  /**
+   * @return the term value (stemmed)
+   */
   public String getTerm() {
     return term;
   }

-  /** @return the weight associated with this term */
+  /**
+   * @return the weight associated with this term
+   */
   public float getWeight() {
     return weight;
   }

-  /** @param term the term value (stemmed) */
+  /**
+   * @param term the term value (stemmed)
+   */
   public void setTerm(String term) {
     this.term = term;
   }

-  /** @param weight the weight associated with this term */
+  /**
+   * @param weight the weight associated with this term
+   */
   public void setWeight(float weight) {
     this.weight = weight;
   }

@@ -71,7 +71,9 @@ public class OffsetRange implements Cloneable {
     return new OffsetRange(from, to);
   }

-  /** @return {@code true} if this range contains or is equal to {@code other}. */
+  /**
+   * @return {@code true} if this range contains or is equal to {@code other}.
+   */
   public boolean contains(OffsetRange other) {
     return from <= other.from && to >= other.to;
   }

@@ -28,7 +28,9 @@ import org.apache.lucene.util.automaton.ByteRunAutomaton;
  */
 public interface LabelledCharArrayMatcher extends CharArrayMatcher {

-  /** @return the label for this matcher */
+  /**
+   * @return the label for this matcher
+   */
   String getLabel();

   /** Associates a label with a CharArrayMatcher */

@@ -39,7 +39,9 @@ public class Passage {
private int[] matchTermFreqInDoc = new int[8];
private int numMatches = 0;

/** @lucene.internal */
/**
 * @lucene.internal
 */
public void addMatch(int startOffset, int endOffset, BytesRef term, int termFreqInDoc) {
  assert startOffset >= this.startOffset && startOffset <= this.endOffset;
  if (numMatches == matchStarts.length) {

@@ -65,7 +67,9 @@ public class Passage {
numMatches++;
}

/** @lucene.internal */
/**
 * @lucene.internal
 */
public void reset() {
  startOffset = endOffset = -1;
  score = 0.0f;

@@ -177,12 +181,16 @@ public class Passage {
return matchTermFreqInDoc;
}

/** @lucene.internal */
/**
 * @lucene.internal
 */
public void setStartOffset(int startOffset) {
  this.startOffset = startOffset;
}

/** @lucene.internal */
/**
 * @lucene.internal
 */
public void setEndOffset(int endOffset) {
  assert startOffset <= endOffset;
  this.endOffset = endOffset;

@@ -1349,7 +1349,9 @@ public class UnifiedHighlighter {
return docListOfFields;
}

/** @lucene.internal */
/**
 * @lucene.internal
 */
protected LimitedStoredFieldVisitor newLimitedStoredFieldsVisitor(String[] fields) {
  return new LimitedStoredFieldVisitor(fields, MULTIVAL_SEP_CHAR, getMaxLength());
}

@@ -1481,16 +1483,24 @@ public class UnifiedHighlighter {

/** Flags for controlling highlighting behavior. */
public enum HighlightFlag {
  /** @see Builder#withHighlightPhrasesStrictly(boolean) */
  /**
   * @see Builder#withHighlightPhrasesStrictly(boolean)
   */
  PHRASES,

  /** @see Builder#withHandleMultiTermQuery(boolean) */
  /**
   * @see Builder#withHandleMultiTermQuery(boolean)
   */
  MULTI_TERM_QUERY,

  /** @see Builder#withPassageRelevancyOverSpeed(boolean) */
  /**
   * @see Builder#withPassageRelevancyOverSpeed(boolean)
   */
  PASSAGE_RELEVANCY_OVER_SPEED,

  /** @see Builder#withWeightMatches(boolean) */
  /**
   * @see Builder#withWeightMatches(boolean)
   */
  WEIGHT_MATCHES

  // TODO: useQueryBoosts

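Each flag mirrors a boolean switch on UnifiedHighlighter.Builder, per the @see tags above. A hedged sketch of how they are set; the builder factory method is an assumption (only the with... setters are confirmed by the Javadoc), so treat this as the shape, not the exact API:

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.uhighlight.UnifiedHighlighter;

public class HighlighterFlagsDemo {
  static UnifiedHighlighter build(IndexSearcher searcher, Analyzer indexAnalyzer) {
    return UnifiedHighlighter.builder(searcher, indexAnalyzer) // factory method assumed
        .withWeightMatches(true)        // enables HighlightFlag.WEIGHT_MATCHES
        .withHandleMultiTermQuery(true) // enables HighlightFlag.MULTI_TERM_QUERY
        .build();
  }
}
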
@@ -205,17 +205,23 @@ public class FieldPhraseList {
return text.toString();
}

/** @return the termsOffsets */
/**
 * @return the termsOffsets
 */
public List<Toffs> getTermsOffsets() {
  return termsOffsets;
}

/** @return the boost */
/**
 * @return the boost
 */
public float getBoost() {
  return boost;
}

/** @return the termInfos */
/**
 * @return the termInfos
 */
public List<TermInfo> getTermsInfos() {
  return termsInfos;
}

@@ -323,7 +329,9 @@ public class FieldPhraseList {
return sb.toString();
}

/** @return the seqnum */
/**
 * @return the seqnum
 */
public int getSeqnum() {
  return seqnum;
}

@@ -344,13 +344,17 @@ public class FieldQuery {
return termSetMap.get(fieldMatch ? field : null);
}

/** @return QueryPhraseMap */
/**
 * @return QueryPhraseMap
 */
public QueryPhraseMap getFieldTermMap(String fieldName, String term) {
  QueryPhraseMap rootMap = getRootMap(fieldName);
  return rootMap == null ? null : rootMap.subMap.get(term);
}

/** @return QueryPhraseMap */
/**
 * @return QueryPhraseMap
 */
public QueryPhraseMap searchPhrase(String fieldName, final List<TermInfo> phraseCandidate) {
  QueryPhraseMap root = getRootMap(fieldName);
  if (root == null) return null;

@@ -130,17 +130,23 @@ public class FieldTermStack {
  }
}

/** @return field name */
/**
 * @return field name
 */
public String getFieldName() {
  return fieldName;
}

/** @return the top TermInfo object of the stack */
/**
 * @return the top TermInfo object of the stack
 */
public TermInfo pop() {
  return termList.poll();
}

/** @param termInfo the TermInfo object to be put on the top of the stack */
/**
 * @param termInfo the TermInfo object to be put on the top of the stack
 */
public void push(TermInfo termInfo) {
  termList.push(termInfo);
}

@@ -48,7 +48,9 @@ public class LukeMain {
return frame;
}

/** @return Returns {@code true} if GUI startup and initialization was successful. */
/**
 * @return Returns {@code true} if GUI startup and initialization was successful.
 */
private static boolean createGUI() {
  // uncaught error handler
  MessageBroker messageBroker = MessageBroker.getInstance();

@@ -105,7 +105,9 @@ public class CircularLogBufferHandler extends Handler {
listeners.remove(listener);
}

/** @return Return a clone of the buffered records so far. */
/**
 * @return Return a clone of the buffered records so far.
 */
public List<ImmutableLogRecord> getLogRecords() {
  synchronized (buffer) {
    return List.copyOf(buffer);

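getLogRecords above is a snapshot-on-read: the copy is taken under the same monitor the writers hold, and List.copyOf returns an immutable list, so callers can iterate it without further locking. The same pattern in a self-contained sketch (illustrative, not Luke's classes):

import java.util.ArrayList;
import java.util.List;

public class SnapshotBufferDemo {
  private final List<String> buffer = new ArrayList<>();

  public void add(String record) {
    synchronized (buffer) {
      buffer.add(record);
    }
  }

  /** Immutable snapshot; safe to iterate while writers keep appending. */
  public List<String> snapshot() {
    synchronized (buffer) {
      return List.copyOf(buffer);
    }
  }
}
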
@@ -130,7 +130,9 @@ public class DirectIODirectory extends FilterDirectory {
this(delegate, DEFAULT_MERGE_BUFFER_SIZE, DEFAULT_MIN_BYTES_DIRECT);
}

/** @return the underlying file system directory */
/**
 * @return the underlying file system directory
 */
public Path getDirectory() {
  return ((FSDirectory) in).getDirectory();
}

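The cast in getDirectory() is safe by construction: DirectIODirectory always wraps an FSDirectory in FilterDirectory's in field. A hedged usage sketch; the index path is hypothetical and the import locations reflect the 9.x misc module as I understand it:

import java.nio.file.Path;
import org.apache.lucene.misc.store.DirectIODirectory;
import org.apache.lucene.store.FSDirectory;

public class DirectIODemo {
  public static void main(String[] args) throws Exception {
    Path indexPath = Path.of("/tmp/demo-index"); // hypothetical location
    try (DirectIODirectory dir = new DirectIODirectory(FSDirectory.open(indexPath))) {
      System.out.println(dir.getDirectory()); // path of the wrapped FSDirectory
    }
  }
}
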
@@ -99,7 +99,9 @@ public abstract class CandidateMatcher<T extends QueryMatch> {
this.errors.put(queryId, e);
}

/** @return the matches from this matcher */
/**
 * @return the matches from this matcher
 */
final MultiMatchingQueries<T> finish(long buildTime, int queryCount) {
  doFinish();
  this.searchTime =

@@ -60,7 +60,9 @@ public class ExplainingMatch extends QueryMatch {
this.explanation = explanation;
}

/** @return the Explanation */
/**
 * @return the Explanation
 */
public Explanation getExplanation() {
  return explanation;
}

@@ -96,12 +96,16 @@ public class HighlightsMatch extends QueryMatch {
this.hits = new TreeMap<>();
}

/** @return a map of hits per field */
/**
 * @return a map of hits per field
 */
public Map<String, Set<Hit>> getHits() {
  return Collections.unmodifiableMap(this.hits);
}

/** @return the fields in which matches have been found */
/**
 * @return the fields in which matches have been found
 */
public Set<String> getFields() {
  return Collections.unmodifiableSet(hits.keySet());
}

@@ -118,7 +122,9 @@ public class HighlightsMatch extends QueryMatch {
return Collections.emptyList();
}

/** @return the total number of hits for the query */
/**
 * @return the total number of hits for the query
 */
public int getHitCount() {
  int c = 0;
  for (Set<Hit> fieldhits : hits.values()) {

@@ -56,32 +56,44 @@ public class MatchingQueries<T extends QueryMatch> {
return matches.get(queryId);
}

/** @return all matches */
/**
 * @return all matches
 */
public Collection<T> getMatches() {
  return matches.values();
}

/** @return the number of queries that matched */
/**
 * @return the number of queries that matched
 */
public int getMatchCount() {
  return matches.size();
}

/** @return how long (in ns) it took to build the Presearcher query for the matcher run */
/**
 * @return how long (in ns) it took to build the Presearcher query for the matcher run
 */
public long getQueryBuildTime() {
  return queryBuildTime;
}

/** @return how long (in ms) it took to run the selected queries */
/**
 * @return how long (in ms) it took to run the selected queries
 */
public long getSearchTime() {
  return searchTime;
}

/** @return the number of queries passed to this CandidateMatcher during the matcher run */
/**
 * @return the number of queries passed to this CandidateMatcher during the matcher run
 */
public int getQueriesRun() {
  return queriesRun;
}

/** @return a List of any MatchErrors created during the matcher run */
/**
 * @return a List of any MatchErrors created during the matcher run
 */
public Map<String, Exception> getErrors() {
  return errors;
}

@@ -110,7 +110,9 @@ public class Monitor implements Closeable {
queryIndex.addListener(listener);
}

/** @return Statistics for the internal query index and cache */
/**
 * @return Statistics for the internal query index and cache
 */
public QueryCacheStats getQueryCacheStats() throws IOException {
  return new QueryCacheStats(
      queryIndex.numDocs(), queryIndex.cacheSize(), queryIndex.getLastPurged());

@@ -264,7 +266,9 @@ public class Monitor implements Closeable {
return queryIndex.getQuery(queryId);
}

/** @return the number of queries (after decomposition) stored in this Monitor */
/**
 * @return the number of queries (after decomposition) stored in this Monitor
 */
public int getDisjunctCount() throws IOException {
  return queryIndex.numDocs();
}

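For context, these getters sit on lucene-monitor's central class. A hedged end-to-end sketch of registering a query and matching a document against it; getMatchCount and getDisjunctCount are confirmed by the surrounding hunks, while register, match, and the constructor reflect the public monitor API as I recall it, and the analyzer and field names are illustrative:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.Term;
import org.apache.lucene.monitor.MatchingQueries;
import org.apache.lucene.monitor.Monitor;
import org.apache.lucene.monitor.MonitorQuery;
import org.apache.lucene.monitor.QueryMatch;
import org.apache.lucene.search.TermQuery;

public class MonitorDemo {
  public static void main(String[] args) throws Exception {
    try (Monitor monitor = new Monitor(new StandardAnalyzer())) {
      // Register a stored query under an application-assigned ID.
      monitor.register(new MonitorQuery("q1", new TermQuery(new Term("body", "lucene"))));

      Document doc = new Document();
      doc.add(new TextField("body", "lucene monitor example", Field.Store.NO));

      // Match one document against all registered queries.
      MatchingQueries<QueryMatch> matches = monitor.match(doc, QueryMatch.SIMPLE_MATCHER);
      System.out.println(matches.getMatchCount() + " queries matched");
      System.out.println(monitor.getDisjunctCount() + " disjuncts stored");
    }
  }
}
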
@@ -113,7 +113,9 @@ public class MonitorConfiguration {
return this;
}

/** @return the QueryDecomposer used by the Monitor */
/**
 * @return the QueryDecomposer used by the Monitor
 */
public QueryDecomposer getQueryDecomposer() {
  return queryDecomposer;
}

@@ -131,12 +133,16 @@ public class MonitorConfiguration {
return this;
}

/** @return the value of Monitor's querycache garbage-collection frequency */
/**
 * @return the value of Monitor's querycache garbage-collection frequency
 */
public long getPurgeFrequency() {
  return purgeFrequency;
}

/** @return Get the units of the Monitor's querycache garbage-collection frequency */
/**
 * @return Get the units of the Monitor's querycache garbage-collection frequency
 */
public TimeUnit getPurgeFrequencyUnits() {
  return purgeFrequencyUnits;
}

@@ -153,7 +159,9 @@ public class MonitorConfiguration {
return this;
}

/** @return the size of the queryindex's in-memory buffer */
/**
 * @return the size of the queryindex's in-memory buffer
 */
public int getQueryUpdateBufferSize() {
  return queryUpdateBufferSize;
}

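The fluent return this; lines above belong to setters that pair with these getters. A hedged configuration sketch: setPurgeFrequency is assumed from the getter names and should be verified against the actual class, and the five-minute value is illustrative:

import java.util.concurrent.TimeUnit;
import org.apache.lucene.monitor.MonitorConfiguration;

public class MonitorConfigDemo {
  public static void main(String[] args) {
    // Purge unreferenced query-cache entries every five minutes.
    MonitorConfiguration config =
        new MonitorConfiguration().setPurgeFrequency(5, TimeUnit.MINUTES); // setter assumed
    System.out.println(config.getPurgeFrequency() + " " + config.getPurgeFrequencyUnits());
  }
}
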
@@ -66,22 +66,30 @@ public class MonitorQuery {
  }
}

/** @return this MonitorQuery's ID */
/**
 * @return this MonitorQuery's ID
 */
public String getId() {
  return id;
}

/** @return this MonitorQuery's query */
/**
 * @return this MonitorQuery's query
 */
public Query getQuery() {
  return query;
}

/** @return this MonitorQuery's string representation */
/**
 * @return this MonitorQuery's string representation
 */
public String getQueryString() {
  return queryString;
}

/** @return this MonitorQuery's metadata */
/**
 * @return this MonitorQuery's metadata
 */
public Map<String, String> getMetadata() {
  return metadata;
}

Some files were not shown because too many files have changed in this diff.