fix javadoc links in ngram contrib

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@808221 13f79535-47bb-0310-9956-ffa450edef68
Mark Robert Miller 2009-08-26 23:26:10 +00:00
parent f39dadfb26
commit 8cc45886d5
2 changed files with 3 additions and 3 deletions

EdgeNGramTokenizer.java

@@ -109,7 +109,7 @@ public class EdgeNGramTokenizer extends Tokenizer {
/**
* Creates EdgeNGramTokenizer that can generate n-grams in the sizes of the given range
*
- * @param factory {@link AttributeFactory} to use
+ * @param factory {@link org.apache.lucene.util.AttributeSource.AttributeFactory} to use
* @param input {@link Reader} holding the input to be tokenized
* @param side the {@link Side} from which to chop off an n-gram
* @param minGram the smallest n-gram to generate
@@ -148,7 +148,7 @@ public class EdgeNGramTokenizer extends Tokenizer {
/**
* Creates EdgeNGramTokenizer that can generate n-grams in the sizes of the given range
*
- * @param factory {@link AttributeFactory} to use
+ * @param factory {@link org.apache.lucene.util.AttributeSource.AttributeFactory} to use
* @param input {@link Reader} holding the input to be tokenized
* @param sideLabel the name of the {@link Side} from which to chop off an n-gram
* @param minGram the smallest n-gram to generate

NGramTokenizer.java

@@ -68,7 +68,7 @@ public class NGramTokenizer extends Tokenizer {
/**
* Creates NGramTokenizer with given min and max n-grams.
- * @param factory {@link AttributeFactory} to use
+ * @param factory {@link org.apache.lucene.util.AttributeSource.AttributeFactory} to use
* @param input {@link Reader} holding the input to be tokenized
* @param minGram the smallest n-gram to generate
* @param maxGram the largest n-gram to generate
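
Usage sketch (not part of this commit): the javadoc fix is needed because AttributeFactory is a nested class of org.apache.lucene.util.AttributeSource, so the unqualified {@link AttributeFactory} target does not resolve in these contrib sources. The snippet below is a hypothetical, Lucene 2.9-era illustration of the documented constructors; the class name NGramFactoryExample, the sample input string, and the choice of DEFAULT_ATTRIBUTE_FACTORY and Side.FRONT are illustrative assumptions, not part of the change.

import java.io.Reader;
import java.io.StringReader;

import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer;
import org.apache.lucene.analysis.ngram.NGramTokenizer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.util.AttributeSource;

public class NGramFactoryExample {
  public static void main(String[] args) throws Exception {
    // The default factory AttributeSource uses when none is supplied;
    // this nested type is what the fixed {@link} now points at.
    AttributeSource.AttributeFactory factory =
        AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY;

    // Edge n-grams of length 1..3 chopped from the front of the input.
    Reader edgeInput = new StringReader("lucene");
    EdgeNGramTokenizer edgeTokenizer =
        new EdgeNGramTokenizer(factory, edgeInput, EdgeNGramTokenizer.Side.FRONT, 1, 3);
    TermAttribute term = (TermAttribute) edgeTokenizer.addAttribute(TermAttribute.class);
    while (edgeTokenizer.incrementToken()) {
      System.out.println(term.term()); // l, lu, luc
    }
    edgeTokenizer.close();

    // Plain n-grams of length 2..3 over the whole input.
    Reader input = new StringReader("lucene");
    NGramTokenizer tokenizer = new NGramTokenizer(factory, input, 2, 3);
    tokenizer.close();
  }
}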