Fix typos in various comments and log msgs ()

Benedict Jin 2019-11-27 17:34:00 +08:00 committed by Jan Høydahl
parent 451e2da779
commit f70e21c91c
20 changed files with 21 additions and 21 deletions

@@ -1655,7 +1655,7 @@ public final class DirectPostingsFormat extends PostingsFormat {
}
}
// Docs + freqs + positions/offets:
// Docs + freqs + positions/offsets:
private final static class LowFreqDocsEnum extends PostingsEnum {
private int[] postings;
private final int posMult;

@@ -28,6 +28,6 @@ public interface TermFrequencyAttribute extends Attribute {
/** Set the custom term frequency of the current term within one document. */
public void setTermFrequency(int termFrequency);
/** Returns the custom term frequencey. */
/** Returns the custom term frequency. */
public int getTermFrequency();
}

@@ -24,7 +24,7 @@ import org.apache.lucene.util.AttributeReflector;
public class TermFrequencyAttributeImpl extends AttributeImpl implements TermFrequencyAttribute, Cloneable {
private int termFrequency = 1;
/** Initialize this attribute with term frequencey of 1 */
/** Initialize this attribute with term frequency of 1 */
public TermFrequencyAttributeImpl() {}
@Override

@@ -93,7 +93,7 @@ public final class CompetitiveImpactAccumulator {
assertConsistent();
}
/** Get the set of competitive freq and norm pairs, orderer by increasing freq and norm. */
/** Get the set of competitive freq and norm pairs, ordered by increasing freq and norm. */
public Collection<Impact> getCompetitiveFreqNormPairs() {
List<Impact> impacts = new ArrayList<>();
int maxFreqForLowerNorms = 0;

@@ -515,7 +515,7 @@ final class IntersectTermsEnum extends BaseTermsEnum {
assert savedStartTerm == null || term.compareTo(savedStartTerm) > 0: "saveStartTerm=" + savedStartTerm.utf8ToString() + " term=" + term.utf8ToString();
return term;
} else {
// This term is a prefix of a term accepted by the automaton, but is not itself acceptd
// This term is a prefix of a term accepted by the automaton, but is not itself accepted
}
isSubBlock = popPushNext();

@@ -239,7 +239,7 @@ final class IndexedDISI extends DocIdSetIterator {
return jumps;
}
// Flushes the offet & index jump-table for blocks. This should be the last data written to out
// Flushes the offset & index jump-table for blocks. This should be the last data written to out
// This method returns the blockCount for the blocks reachable for the jump_table or -1 for no jump-table
private static short flushBlockJumps(int[] jumps, int blockCount, IndexOutput out, long origo) throws IOException {
if (blockCount == 2) { // Jumps with a single real entry + NO_MORE_DOCS is just wasted space so we ignore that

@@ -193,7 +193,7 @@ class SimpleGeoJSONPolygonParser {
// OK, we recurse
} else {
upto = uptoStart;
throw newParseException("can only handle type FeatureCollection (if it has a single polygon geometry), Feature, Polygon or MutiPolygon, but got " + type);
throw newParseException("can only handle type FeatureCollection (if it has a single polygon geometry), Feature, Polygon or MultiPolygon, but got " + type);
}
} else if (key.equals("coordinates") && isValidGeometryPath(path)) {
if (o instanceof List == false) {

@@ -27,7 +27,7 @@ abstract class DocValuesIterator extends DocIdSetIterator {
* {@code target} must be greater than or equal to the current
* {@link #docID() doc ID} and must be a valid doc ID, ie. &ge; 0 and
* &lt; {@code maxDoc}.
* After this method returns, {@link #docID()} retuns {@code target}. */
* After this method returns, {@link #docID()} returns {@code target}. */
public abstract boolean advanceExact(int target) throws IOException;
}

@@ -60,7 +60,7 @@ final class DocumentsWriterPerThread {
private Throwable abortingException;
final void onAbortingException(Throwable throwable) {
assert abortingException == null: "aborting excpetion has already been set";
assert abortingException == null: "aborting exception has already been set";
abortingException = throwable;
}

@@ -275,7 +275,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
private final Map<String,FieldDimensions> dimensions;
// TODO: we should similarly catch an attempt to turn
// norms back on after they were already ommitted; today
// norms back on after they were already committed; today
// we silently discard the norm but this is badly trappy
private int lowestUnassignedFieldNumber = -1;

@@ -250,13 +250,13 @@ final class FieldUpdatesBuffer {
@Override
public int hashCode() {
throw new UnsupportedOperationException(
"this struct should not be use in map or other data-stuctures that use hashCode / equals");
"this struct should not be use in map or other data-structures that use hashCode / equals");
}
@Override
public boolean equals(Object obj) {
throw new UnsupportedOperationException(
"this struct should not be use in map or other data-stuctures that use hashCode / equals");
"this struct should not be use in map or other data-structures that use hashCode / equals");
}
}

@@ -114,7 +114,7 @@ public class SegmentWriteState {
}
// currently only used by assert? clean up and make real check?
// either it's a segment suffix (_X_Y) or it's a parseable generation
// either it's a segment suffix (_X_Y) or it's a parsable generation
// TODO: this is very confusing how ReadersAndUpdates passes generations via
// this mechanism, maybe add 'generation' explicitly to ctor create the 'actual suffix' here?
private boolean assertSegmentSuffix(String segmentSuffix) {

@@ -422,7 +422,7 @@ public class IndexSearcher {
return count;
}
// general case: create a collecor and count matches
// general case: create a collector and count matches
final CollectorManager<TotalHitCountCollector, Integer> collectorManager = new CollectorManager<TotalHitCountCollector, Integer>() {
@Override

@@ -162,7 +162,7 @@ public abstract class ReferenceManager<G> implements Closeable {
private void doMaybeRefresh() throws IOException {
// it's ok to call lock() here (blocking) because we're supposed to get here
// from either maybeRefreh() or maybeRefreshBlocking(), after the lock has
// from either maybeRefresh() or maybeRefreshBlocking(), after the lock has
// already been obtained. Doing that protects us from an accidental bug
// where this method will be called outside the scope of refreshLock.
// Per ReentrantLock's javadoc, calling lock() by the same thread more than

@@ -496,7 +496,7 @@ public class Automaton implements Accountable {
int upto = t.transitionUpto;
if (upto == states[2*t.source]) {
// Transition isn't initialzed yet (this is the first transition); don't check:
// Transition isn't initialized yet (this is the first transition); don't check:
return true;
}

@@ -31,7 +31,7 @@ public class CharacterRunAutomaton extends RunAutomaton {
/**
* Construct specifying maxDeterminizedStates.
* @param a Automaton to match
* @param maxDeterminizedStates maximum number of states that the automataon
* @param maxDeterminizedStates maximum number of states that the automaton
* can have once determinized. If more states are required to determinize
* it then a TooComplexToDeterminizeException is thrown.
*/

@@ -273,7 +273,7 @@ public class CompiledAutomaton implements Accountable {
if (transition.min < leadLabel) {
maxIndex = i;
} else {
// Transitions are alway sorted
// Transitions are always sorted
break;
}
}

@@ -48,7 +48,7 @@ import org.apache.lucene.util.BitUtil;
* <li>Ints: if BitsPerValue is <tt>0</tt>, then there is nothing to read and
* all values perfectly match the result of the function. Otherwise, these
* are the {@link PackedInts packed} deltas from the expected value
* (computed from the function) using exaclty BitsPerValue bits per value.
* (computed from the function) using exactly BitsPerValue bits per value.
* </ul>
* @see MonotonicBlockPackedReader
* @lucene.internal

@@ -855,7 +855,7 @@ public class PackedInts {
case PACKED_SINGLE_BLOCK:
return new DirectPacked64SingleBlockReader(bitsPerValue, valueCount, in);
default:
throw new AssertionError("Unknwown format: " + format);
throw new AssertionError("Unknown format: " + format);
}
}

@@ -306,7 +306,7 @@ public class TokenSourcesTest extends BaseTokenStreamTestCase {
int curOffset;
/** Just make a token with the text, and set the payload
* to the text as well. Offets increment "naturally". */
* to the text as well. Offsets increment "naturally". */
private Token getToken(String text) {
Token t = new Token(text, curOffset, curOffset+text.length());
t.setPayload(new BytesRef(text));