SOLR-12773: Fix typos widely spread around codebase

Erick Erickson 2018-09-16 09:46:20 -07:00
parent 910a1c0312
commit b2b597b038
27 changed files with 46 additions and 46 deletions

@@ -3199,7 +3199,7 @@ Other
* LUCENE-6845: SpanScorer is now merged into Spans (Alan Woodward, David Smiley)
-* LUCENE-6887: DefaultSimilarity is deprecated, use ClassicSimilarity for equivilent behavior,
+* LUCENE-6887: DefaultSimilarity is deprecated, use ClassicSimilarity for equivalent behavior,
or consider switching to BM25Similarity which will become the new default in Lucene 6.0 (hossman)
* LUCENE-6893: factor out CorePlusQueriesParser from CorePlusExtensionsParser

@@ -73,7 +73,7 @@ public abstract class MultiFunction extends ValueSource {
}
/**
-* Equivilent the the {@code FunctionValues[]} method with the same name, but optimized for
+* Equivalent to the {@code FunctionValues[]} method with the same name, but optimized for
* dealing with exactly 2 arguments.
*
* @return true if <em>both</em> of the specified <code>values</code>
@@ -85,7 +85,7 @@ public abstract class MultiFunction extends ValueSource {
}
/**
-* Equivilent the the {@code FunctionValues[]} method with the same name, but optimized for
+* Equivalent to the {@code FunctionValues[]} method with the same name, but optimized for
* dealing with exactly 2 arguments.
*
* @return true if <em>either</em> of the specified <code>values</code>

@@ -16790,7 +16790,7 @@ the canonical format when generating dates from "Date Math" strings
(particularly as it pertains to milliseconds ending in trailing zeros).
As a result equivalent dates could not always be compared properly.
This problem is corrected in Solr 1.3, but DateField users that might
-have been affected by indexing inconsistent formats of equivilent
+have been affected by indexing inconsistent formats of equivalent
dates (ie: 1995-12-31T23:59:59Z vs 1995-12-31T23:59:59.000Z) may want
to consider reindexing to correct these inconsistencies. Users who
depend on some of the the "broken" behavior of DateField in Solr 1.2

@@ -180,7 +180,7 @@ public class StatsField {
}
/**
-* the equivilent stats if "calcdistinct" is specified
+* the equivalent stats if "calcdistinct" is specified
* @see Stat#countDistinct
* @see Stat#distinctValues
*/
@@ -639,7 +639,7 @@ public class StatsField {
final NumberType hashableNumType = getHashableNumericType(field);
// some sane defaults
-int log2m = 13; // roughly equivilent to "cardinality='0.33'"
+int log2m = 13; // roughly equivalent to "cardinality='0.33'"
int regwidth = 6; // with decent hash, this is plenty for all valid long hashes
if (NumberType.FLOAT.equals(hashableNumType) || NumberType.INTEGER.equals(hashableNumType)) {

@@ -264,10 +264,10 @@ public class CurrencyFieldType extends FieldType implements SchemaAware, Resourc
* <p>
* For example: If the default Currency specified for a field is
* <code>USD</code>, then the values returned by this value source would
-* represent the equivilent number of "cents" (ie: value in dollars * 100)
+* represent the equivalent number of "cents" (ie: value in dollars * 100)
* after converting each document's native currency to USD -- because the
* default fractional digits for <code>USD</code> is "<code>2</code>".
-* So for a document whose indexed value was currently equivilent to
+* So for a document whose indexed value was currently equivalent to
* "<code>5.43,USD</code>" using the the exchange provider for this field,
* this ValueSource would return a value of "<code>543</code>"
* </p>
@@ -294,9 +294,9 @@ public class CurrencyFieldType extends FieldType implements SchemaAware, Resourc
* <p>
* For example: If the <code>targetCurrencyCode</code> param is set to
* <code>USD</code>, then the values returned by this value source would
-* represent the equivilent number of dollars after converting each
+* represent the equivalent number of dollars after converting each
* document's raw value to <code>USD</code>. So for a document whose
-* indexed value was currently equivilent to "<code>5.43,USD</code>"
+* indexed value was currently equivalent to "<code>5.43,USD</code>"
* using the the exchange provider for this field, this ValueSource would
* return a value of "<code>5.43</code>"
* </p>

@@ -1250,7 +1250,7 @@ public abstract class FieldType extends FieldProperties {
/**
* The appropriate <code>SortedSetSelector.Type</code> option for this <code>MultiValueSelector</code>,
-* may be null if there is no equivilent
+* may be null if there is no equivalent
*/
public SortedSetSelector.Type getSortedSetSelectorType() {
return sType;
@@ -1258,7 +1258,7 @@ public abstract class FieldType extends FieldProperties {
/**
* The appropriate <code>SortedNumericSelector.Type</code> option for this <code>MultiValueSelector</code>,
-* may be null if there is no equivilent
+* may be null if there is no equivalent
*/
public SortedNumericSelector.Type getSortedNumericSelectorType() {
return nType;

@@ -360,7 +360,7 @@ public class IndexSchema {
for (FieldInfo f : MultiFields.getMergedFieldInfos(reader)) {
if (f.getDocValuesType() == DocValuesType.NONE) {
// we have a field (of some kind) in the reader w/o DocValues
-// if we have an equivilent indexed=true field in the schema, trust it's uninversion type (if any)
+// if we have an equivalent indexed=true field in the schema, trust it's uninversion type (if any)
final SchemaField sf = getFieldOrNull(f.name);
if (sf != null && sf.indexed()) {
final UninvertingReader.Type type = sf.getType().getUninversionType(sf);

@@ -129,10 +129,10 @@ import org.slf4j.LoggerFactory;
* <code>fieldRegex</code> selector and a destination <code>pattern</code>, a "short hand" syntax
* is support for convinience: The <code>pattern</code> and <code>replacement</code> may be specified
* at the top level, omitting <code>source</code> and <code>dest</code> declarations completely, and
-* the <code>pattern</code> will be used to construct an equivilent <code>source</code> selector internally.
+* the <code>pattern</code> will be used to construct an equivalent <code>source</code> selector internally.
* </p>
* <p>
-* For example, both of the following configurations are equivilent:
+* For example, both of the following configurations are equivalent:
* </p>
* <pre class="prettyprint">
* &lt;!-- full syntax --&gt;

@@ -136,7 +136,7 @@ public class TolerantUpdateProcessorFactory extends UpdateRequestProcessorFactor
throw new SolrException(ErrorCode.BAD_REQUEST, "'"+MAX_ERRORS_PARAM + "' must either be non-negative, or -1 to indicate 'unlimiited': " + maxErrors);
}
-// NOTE: even if 0==maxErrors, we still inject processor into chain so respones has expected header info
+// NOTE: even if 0==maxErrors, we still inject processor into chain so responses has expected header info
return new TolerantUpdateProcessor(req, rsp, next, maxErrors, distribPhase);
}
}

@@ -487,7 +487,7 @@
<str name="replacement">key_feat$1</str>
</lst>
</processor>
-<!-- equivilent sugar syntax to above, with slightly diff destination name -->
+<!-- equivalent sugar syntax to above, with slightly diff destination name -->
<processor class="solr.processor.CloneFieldUpdateProcessorFactory">
<str name="pattern">^feat(.*)s$</str>
<str name="replacement">best_feat$1</str>

@@ -878,7 +878,7 @@ public class BasicFunctionalityTest extends SolrTestCaseJ4 {
assertU(adoc("id", "6", "bday", "NOW+2YEARS"));
assertU(commit());
-// a ridiculoulsy long date math expression that's still equivilent to july4
+// a ridiculoulsy long date math expression that's still equivalent to july4
final StringBuilder july4Long = new StringBuilder(july4);
final int iters = atLeast(10);
for (int i = 0; i < iters; i++) {

@@ -55,7 +55,7 @@ import org.slf4j.LoggerFactory;
* Tests randomized JSON Facets, sometimes using query 'join' domain transfers and/or domain 'filter' options
* </p>
* <p>
-* The results of each facet constraint count will be compared with a verification query using an equivilent filter
+* The results of each facet constraint count will be compared with a verification query using an equivalent filter
* </p>
*
* @see TestCloudPivotFacet
@@ -422,7 +422,7 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase {
/**
* Given a set of (potentially nested) term facets, and a base query string, asserts that
* the actual counts returned when executing that query with those facets match the expected results
-* of filtering on the equivilent facet terms+domain
+* of filtering on the equivalent facet terms+domain
*/
private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck,
Map<String,TermFacet> expected,
@@ -465,7 +465,7 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase {
/**
* Recursive Helper method that walks the actual facet response, comparing the counts to the expected output
-* based on the equivilent filters generated from the original TermFacet.
+* based on the equivalent filters generated from the original TermFacet.
*/
private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck,
final Map<String,TermFacet> expected,
@@ -781,7 +781,7 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase {
/**
* Given some original SolrParams, returns new SolrParams where the original "q" param is wrapped
-* as needed to apply the equivilent transformation to a query as this domain would to a facet
+* as needed to apply the equivalent transformation to a query as this domain would to a facet
*/
public SolrParams applyDomainToQuery(String safeKey, SolrParams in) {
assert null == in.get(safeKey); // shouldn't be possible if every facet uses a unique key string

@@ -305,7 +305,7 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
/**
* Given a set of term facets, and top level query strings, asserts that
* the SKG stats for each facet term returned when executing that query with those foreground/background
-* queries match the expected results of executing the equivilent queries in isolation.
+* queries match the expected results of executing the equivalent queries in isolation.
*
* @see #verifySKGResults
*/
@@ -355,7 +355,7 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
/**
* Recursive helper method that walks the actual facet response, comparing the SKG results to
-* the expected output based on the equivilent filters generated from the original TermFacet.
+* the expected output based on the equivalent filters generated from the original TermFacet.
*/
private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck,
final Map<String,TermFacet> expected,

@@ -161,7 +161,7 @@ public class PhrasesIdentificationComponentTest extends SolrTestCaseJ4 {
}
public void testWhiteBoxPhraseParsingShortInput() throws Exception {
-// for input this short, either of these fields should be (mostly) equivilent
+// for input this short, either of these fields should be (mostly) equivalent
final Map<String,Integer> fields = new TreeMap<>();
fields.put("multigrams_body", 7);
fields.put("multigrams_body_short", 3);

@@ -211,7 +211,7 @@ public class StatsComponentTest extends SolrTestCaseJ4 {
// we should be able to compute exact same stats for a field even
// when we specify it using the "field()" function, or use other
-// identify equivilent functions
+// identify equivalent functions
for (String param : new String[] {
// bare
"{!key="+key+" ex=key_ex_tag}" + f,
@@ -543,7 +543,7 @@ public class StatsComponentTest extends SolrTestCaseJ4 {
// we should be able to compute exact same stats for a field even
// when we specify it using the "field()" function, or use other
-// identify equivilent functions
+// identify equivalent functions
for (String param : new String[] {
// bare
"{!key="+key+" ex=key_ex_tag}" + f,
@@ -628,7 +628,7 @@ public class StatsComponentTest extends SolrTestCaseJ4 {
// we should be able to compute exact same stats & stats.facet for a field even
// when we specify it using the "field()" function, or use other
-// identify equivilent functions
+// identify equivalent functions
for (String param : new String[] {
// bare
"{!key="+f+" ex=key_ex_tag}" + f,
@@ -1697,7 +1697,7 @@ public class StatsComponentTest extends SolrTestCaseJ4 {
"need to note in upgrade instructions and probably adjust hueristic",
8, HLL.MAXIMUM_REGWIDTH_PARAM);
-// all of these should produce equivilent HLLOptions (Long, Double, or String using defaults)
+// all of these should produce equivalent HLLOptions (Long, Double, or String using defaults)
SolrParams[] longDefaultParams = new SolrParams[] {
// basic usage
params("cardinality","true"),
@@ -1733,7 +1733,7 @@ public class StatsComponentTest extends SolrTestCaseJ4 {
}
-// all of these should produce equivilent HLLOptions (Int, Float, or ValueSource using defaults)
+// all of these should produce equivalent HLLOptions (Int, Float, or ValueSource using defaults)
SolrParams[] intDefaultParams = new SolrParams[] {
// basic usage
params("cardinality","true"),

@@ -3040,7 +3040,7 @@ public class SimpleFacetsTest extends SolrTestCaseJ4 {
,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]"
);
-// these should have equivilent behavior (multivalued 'other' param: top level vs local)
+// these should have equivalent behavior (multivalued 'other' param: top level vs local)
for (SolrQueryRequest req : new SolrQueryRequest[] {
req("q", "id_i1:[42 TO 47]"
,"facet","true"

@@ -486,7 +486,7 @@ public class CurrencyFieldTypeTest extends SolrTestCaseJ4 {
clearIndex();
// NOTE: in our test conversions EUR uses an asynetric echange rate
-// these are the equivilent values when converting to: USD EUR GBP
+// these are the equivalent values when converting to: USD EUR GBP
assertU(adoc("id", "" + 1, fieldName, "10.00,USD")); // 10.00,USD 25.00,EUR 5.00,GBP
assertU(adoc("id", "" + 2, fieldName, "15.00,EUR")); // 7.50,USD 15.00,EUR 7.50,GBP
assertU(adoc("id", "" + 3, fieldName, "6.00,GBP")); // 12.00,USD 12.00,EUR 6.00,GBP

@@ -3893,7 +3893,7 @@ public class TestPointFields extends SolrTestCaseJ4 {
for (Object value : values) {
// ideally we should require that all input values be diff forms of the same logical value
// (ie '"42"' vs 'new Integer(42)') and assert that each produces an equivilent list of IndexableField objects
// (ie '"42"' vs 'new Integer(42)') and assert that each produces an equivalent list of IndexableField objects
// but that doesn't seem to work -- appears not all IndexableField classes override Object.equals?
final List<IndexableField> result = callAndCheckCreateFields(fieldName, pointType, value);
assertNotNull(value + " => null", result);

@@ -295,7 +295,7 @@ public class TestSortableTextField extends SolrTestCaseJ4 {
for (String searchF : Arrays.asList("whitespace_stxt", "whitespace_plain_txt",
"whitespace_max3_stxt", "whitespace_max6_stxt",
"whitespace_max0_stxt", "whitespace_maxNeg_stxt")) {
-// maxChars of 0 or neg should be equivilent to no max at all
+// maxChars of 0 or neg should be equivalent to no max at all
for (String sortF : Arrays.asList("whitespace_stxt", "whitespace_plain_str",
"whitespace_max0_stxt", "whitespace_maxNeg_stxt")) {
@@ -484,7 +484,7 @@ public class TestSortableTextField extends SolrTestCaseJ4 {
* tests that a SortableTextField using KeywordTokenzier (w/docValues) behaves exactly the same as
* StrFields that it's copied to for quering and sorting
*/
-public void testRandomStrEquivilentBehavior() throws Exception {
+public void testRandomStrEquivalentBehavior() throws Exception {
final List<String> test_fields = Arrays.asList("keyword_stxt", "keyword_dv_stxt",
"keyword_s_dv", "keyword_s");
// we use embedded client instead of assertQ: we want to compare the responses from multiple requests

@@ -51,7 +51,7 @@ public class CurrencyRangeFacetCloudTest extends SolrCloudTestCase {
private static final List<String> STR_VALS = Arrays.asList("x0", "x1", "x2");
// NOTE: in our test conversions EUR uses an asynetric echange rate
-// these are the equivilent values relative to: USD EUR GBP
+// these are the equivalent values relative to: USD EUR GBP
private static final List<String> VALUES = Arrays.asList("10.00,USD", // 10.00,USD 25.00,EUR 5.00,GBP
"15.00,EUR", // 7.50,USD 15.00,EUR 7.50,GBP
"6.00,GBP", // 12.00,USD 12.00,EUR 6.00,GBP

@@ -710,7 +710,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
// multiple params for picking groupHead should work as long as only one is non-null
// sort used
for (SolrParams collapse : new SolrParams[] {
-// these should all be equivilently valid
+// these should all be equally valid
params("fq", "{!collapse field="+group+" nullPolicy=collapse sort='test_i asc'"+hint+"}"),
params("fq", "{!collapse field="+group+" nullPolicy=collapse min='' sort='test_i asc'"+hint+"}"),
params("fq", "{!collapse field="+group+" nullPolicy=collapse max='' sort='test_i asc'"+hint+"}"),

@@ -243,7 +243,7 @@ public class TestRangeQuery extends SolrTestCaseJ4 {
"foo_ti","foo_tl","foo_tf","foo_td" };
// NOTE: foo_s supports ranges, but for the arrays below we are only
-// interested in fields that support *equivilent* ranges -- strings
+// interested in fields that support *equivalent* ranges -- strings
// are not ordered the same as ints/longs, so we can't test the ranges
// for equivilence across diff fields.
//

@@ -159,14 +159,14 @@ public class SortByFunctionTest extends SolrTestCaseJ4 {
// then we lose precision in sorting; so lets sanity check that our JVM isn't doing something wacky
// in converstion that violates the principle of the test
assertEquals("WTF? small longs cast to double aren't equivilent?",
assertEquals("WTF? small longs cast to double aren't equivalent?",
(double)A, (double)B, 0.0D);
assertEquals("WTF? small longs cast to double aren't equivilent?",
assertEquals("WTF? small longs cast to double aren't equivalent?",
(double)A, (double)C, 0.0D);
assertEquals("WTF? big longs cast to double aren't equivilent?",
assertEquals("WTF? big longs cast to double aren't equivalent?",
(double)Z, (double)Y, 0.0D);
assertEquals("WTF? big longs cast to double aren't equivilent?",
assertEquals("WTF? big longs cast to double aren't equivalent?",
(double)Z, (double)X, 0.0D);
int docId = 0;

@@ -41,7 +41,7 @@ Solr can sort query responses according to:
* A single-valued TextField that uses an analyzer (such as the KeywordTokenizer) that produces only a single term per document. TextField does not support `docValues="true"`, but a DocValue-like structure will be built on the fly at runtime.
** *NOTE:* If you want to be able to sort on a field whose contents you want to tokenize to facilitate searching, <<copying-fields.adoc#copying-fields,use a `copyField` directive>> in the the Schema to clone the field. Then search on the field and sort on its clone.
-In the case of primitive fields, or SortableTextFields, that are `multiValued="true"` the representative value used for each doc when sorting depends on the sort direction: The minimum value in each document is used for ascending (`asc`) sorting, while the maximal value in each document is used for descending (`desc`) sorting. This default behavior is equivilent to explicitly sorting using the 2 argument `<<function-queries.adoc#field-function,field()>>` function: `sort=field(name,min) asc` and `sort=field(name,max) desc`
+In the case of primitive fields, or SortableTextFields, that are `multiValued="true"` the representative value used for each doc when sorting depends on the sort direction: The minimum value in each document is used for ascending (`asc`) sorting, while the maximal value in each document is used for descending (`desc`) sorting. This default behavior is equivalent to explicitly sorting using the 2 argument `<<function-queries.adoc#field-function,field()>>` function: `sort=field(name,min) asc` and `sort=field(name,max) desc`
The table below explains how Solr responds to various settings of the `sort` parameter.

@@ -56,7 +56,7 @@ If no "type" is specified (either explicitly or implicitly) then the <<the-stand
`fq={!df=summary}solr rocks`
-is equivilent to:
+is equivalent to:
`fq={!type=lucene df=summary}solr rocks`

@@ -75,7 +75,7 @@ import org.jsoup.select.NodeVisitor;
*
* <p>
* This tool parses the generated HTML site, looking for these situations in order to fail the build, since
-* (depending on the type of check) these situations will result in inconsistent/broken HTML, or equivilent
+* (depending on the type of check) these situations will result in inconsistent/broken HTML, or equivalent
* problems in the generated PDF.
* </p>
* <p>

@@ -491,7 +491,7 @@ public abstract class BaseDistributedSearchTestCase extends SolrTestCaseJ4 {
/**
* Indexes the document in both the control client and the specified client asserting
-* that the respones are equivilent
+* that the responses are equivalent
*/
protected UpdateResponse indexDoc(SolrClient client, SolrParams params, SolrInputDocument... sdocs) throws IOException, SolrServerException {
UpdateResponse controlRsp = add(controlClient, params, sdocs);