LUCENE-9394: fix and suppress warnings (#1563)

* LUCENE-9394: fix and suppress warnings in lucene/*
* Change type of ValuesSource context from raw Map to Map<Object, Object>
This commit is contained in:
Michael Sokolov authored on 2020-06-12 07:25:31 -04:00; committed by GitHub
parent cf8f83cef9
commit 26075fc1dc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
120 changed files with 427 additions and 576 deletions

View File

@@ -65,7 +65,9 @@ public class ConcatenateGraphFilterFactory extends TokenFilterFactory {
public ConcatenateGraphFilterFactory(Map<String, String> args) {
super(args);
Version luceneMatchVersion = getLuceneMatchVersion();
if (luceneMatchVersion.onOrAfter(Version.LUCENE_8_4_0)) {
@SuppressWarnings("deprecation")
Version LUCENE_8_4_0 = Version.LUCENE_8_4_0;
if (luceneMatchVersion.onOrAfter(LUCENE_8_4_0)) {
tokenSeparator = getCharacter(args, "tokenSeparator", ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR);
} else {
boolean preserveSep = getBoolean(args, "preserveSep", ConcatenateGraphFilter.DEFAULT_PRESERVE_SEP);

View File

@@ -27,6 +27,7 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
* @since 7.0.0
* @lucene.spi {@value #NAME}
*/
@Deprecated
public class FixBrokenOffsetsFilterFactory extends TokenFilterFactory {
/** SPI name */

View File

@@ -33,8 +33,6 @@ import org.apache.lucene.analysis.util.ResourceLoaderAware;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.search.PhraseQuery;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.*;
/**
* Factory for {@link WordDelimiterFilter}.
* <pre class="prettyprint">
@@ -59,6 +57,23 @@ import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.*;
@Deprecated
public class WordDelimiterFilterFactory extends TokenFilterFactory implements ResourceLoaderAware {
private static final int CATENATE_ALL = WordDelimiterFilter.CATENATE_ALL;
private static final int CATENATE_NUMBERS = WordDelimiterFilter.CATENATE_NUMBERS;
private static final int CATENATE_WORDS = WordDelimiterFilter.CATENATE_WORDS;
private static final int GENERATE_NUMBER_PARTS = WordDelimiterFilter.GENERATE_NUMBER_PARTS;
private static final int GENERATE_WORD_PARTS = WordDelimiterFilter.GENERATE_WORD_PARTS;
private static final int PRESERVE_ORIGINAL = WordDelimiterFilter.PRESERVE_ORIGINAL;
private static final int SPLIT_ON_CASE_CHANGE = WordDelimiterFilter.SPLIT_ON_CASE_CHANGE;
private static final int SPLIT_ON_NUMERICS = WordDelimiterFilter.SPLIT_ON_NUMERICS;
private static final int STEM_ENGLISH_POSSESSIVE = WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE;
private static final int ALPHA = WordDelimiterFilter.ALPHA;
private static final int ALPHANUM = WordDelimiterFilter.ALPHANUM;
private static final int DIGIT = WordDelimiterFilter.DIGIT;
private static final int LOWER = WordDelimiterFilter.LOWER;
private static final int SUBWORD_DELIM = WordDelimiterFilter.SUBWORD_DELIM;
private static final int UPPER = WordDelimiterFilter.UPPER;
/** SPI name */
public static final String NAME = "wordDelimiter";

View File

@@ -256,6 +256,7 @@ public class TestBugInSomething extends BaseTokenStreamTestCase {
-24, 106, -16, 126, 115, -105, 97, 65, -33, 57, 44, -1, 123, -68, 100, 13, -41, -64,
-119, 0, 92, 94, -36, 53, -9, -102, -18, 90, 94, -26, 31, 71, -20
};
@SuppressWarnings("deprecation")
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {

View File

@@ -142,7 +142,9 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
}
private static final Map<Constructor<?>,Predicate<Object[]>> brokenConstructors = new HashMap<>();
static {
static { initBrokenConstructors(); }
@SuppressWarnings("deprecation")
private static void initBrokenConstructors() {
try {
brokenConstructors.put(
LimitTokenCountFilter.class.getConstructor(TokenStream.class, int.class),

View File

@@ -48,7 +48,10 @@ import org.apache.lucene.util.SetOnce.AlreadySetException;
import org.apache.lucene.util.Version;
public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
@SuppressWarnings("deprecation")
private static final Version LUCENE_8_0_0 = Version.LUCENE_8_0_0;
// Test some examples (TODO: we only check behavior, we may need something like TestRandomChains...)
public void testWhitespaceFactoryWithFolding() throws Exception {
@@ -105,7 +108,7 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
public void testFactoryHtmlStripClassicFolding() throws Exception {
CustomAnalyzer a = CustomAnalyzer.builder()
.withDefaultMatchVersion(Version.LUCENE_8_0_0)
.withDefaultMatchVersion(LUCENE_8_0_0)
.addCharFilter(HTMLStripCharFilterFactory.class)
.withTokenizer(ClassicTokenizerFactory.class)
.addTokenFilter(ASCIIFoldingFilterFactory.class, "preserveOriginal", "true")
@@ -124,7 +127,7 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
assertSame(LowerCaseFilterFactory.class, tokenFilters.get(1).getClass());
assertEquals(100, a.getPositionIncrementGap("dummy"));
assertEquals(1000, a.getOffsetGap("dummy"));
assertSame(Version.LUCENE_8_0_0, a.getVersion());
assertSame(LUCENE_8_0_0, a.getVersion());
assertAnalyzesTo(a, "<p>foo bar</p> FOO BAR",
new String[] { "foo", "bar", "foo", "bar" },
@@ -137,7 +140,7 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
public void testHtmlStripClassicFolding() throws Exception {
CustomAnalyzer a = CustomAnalyzer.builder()
.withDefaultMatchVersion(Version.LUCENE_8_0_0)
.withDefaultMatchVersion(LUCENE_8_0_0)
.addCharFilter("htmlstrip")
.withTokenizer("classic")
.addTokenFilter("asciifolding", "preserveOriginal", "true")
@@ -156,7 +159,7 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
assertSame(LowerCaseFilterFactory.class, tokenFilters.get(1).getClass());
assertEquals(100, a.getPositionIncrementGap("dummy"));
assertEquals(1000, a.getOffsetGap("dummy"));
assertSame(Version.LUCENE_8_0_0, a.getVersion());
assertSame(LUCENE_8_0_0, a.getVersion());
assertAnalyzesTo(a, "<p>foo bar</p> FOO BAR",
new String[] { "foo", "bar", "foo", "bar" },

View File

@@ -75,6 +75,7 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
builder.add(new CharsRef("mykeyword"), new CharsRef("mysynonym"), true);
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
tokenizer.setReader(new StringReader("mykeyword"));
@SuppressWarnings("deprecation")
SynonymFilter filter = new SynonymFilter(tokenizer, builder.build(), true);
ConcatenateGraphFilter stream = new ConcatenateGraphFilter(filter);
assertTokenStreamContents(stream, new String[] {"mykeyword", "mysynonym"}, null, null, new int[] { 1, 0 });
@ -87,6 +88,7 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
Tokenizer tokenStream = new MockTokenizer(MockTokenizer.WHITESPACE, true);
String input = "mykeyword another keyword";
tokenStream.setReader(new StringReader(input));
@SuppressWarnings("deprecation")
SynonymFilter filter = new SynonymFilter(tokenStream, builder.build(), true);
ConcatenateGraphFilter stream = new ConcatenateGraphFilter(filter, SEP_LABEL, false, 100);
String[] expectedOutputs = new String[2];
@ -145,6 +147,7 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
}
MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
tokenizer.setReader(new StringReader(valueBuilder.toString()));
@SuppressWarnings("deprecation")
SynonymFilter filter = new SynonymFilter(tokenizer, builder.build(), true);
int count;

View File

@@ -64,8 +64,9 @@ public class TestConcatenateGraphFilterFactory extends BaseTokenStreamFactoryTes
tokenizer.setReader(reader);
TokenStream stream = tokenizer;
stream = new StopFilter(stream, StopFilter.makeStopSet("B2"));
@SuppressWarnings("deprecation") Version LUCENE_8_0_0 = Version.LUCENE_8_0_0;
stream = tokenFilterFactory("ConcatenateGraph",
Version.LUCENE_8_0_0,
LUCENE_8_0_0,
"preserveSep", "false"
).create(stream);
assertTokenStreamContents(stream, new String[]{output});

View File

@@ -29,6 +29,7 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
@Deprecated
public class TestFixBrokenOffsetsFilter extends BaseTokenStreamTestCase {
public void testBogusTermVectors() throws IOException {

View File

@@ -76,6 +76,7 @@ public class TestLimitTokenPositionFilter extends BaseTokenStreamTestCase {
SynonymMap.Builder.join(new String[]{"dopple", "ganger"}, multiWordCharsRef);
builder.add(new CharsRef("two"), multiWordCharsRef.get(), true);
SynonymMap synonymMap = builder.build();
@SuppressWarnings("deprecation")
TokenStream stream = new SynonymFilter(tokenizer, synonymMap, true);
stream = new LimitTokenPositionFilter(stream, 3, consumeAll);

View File

@@ -23,7 +23,7 @@ import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.synonym.SynonymFilter;
import org.apache.lucene.analysis.synonym.SynonymGraphFilter;
import org.apache.lucene.analysis.synonym.SynonymMap;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
@@ -156,7 +156,7 @@ public class TestRemoveDuplicatesTokenFilter extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
TokenStream stream = new SynonymFilter(tokenizer, map, ignoreCase);
TokenStream stream = new SynonymGraphFilter(tokenizer, map, ignoreCase);
return new TokenStreamComponents(tokenizer, new RemoveDuplicatesTokenFilter(stream));
}
};

View File

@@ -37,25 +37,27 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.util.IOUtils;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_ALL;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_NUMBERS;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_WORDS;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_NUMBER_PARTS;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_WORD_PARTS;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.IGNORE_KEYWORDS;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.PRESERVE_ORIGINAL;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_CASE_CHANGE;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_NUMERICS;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE;
/**
* New WordDelimiterFilter tests... most of the tests are in ConvertedLegacyTest
* TODO: should explicitly test things like protWords and not rely on
* the factory tests in Solr.
*/
@Deprecated
public class TestWordDelimiterFilter extends BaseTokenStreamTestCase {
private static final int CATENATE_ALL = WordDelimiterFilter.CATENATE_ALL;
private static final int CATENATE_NUMBERS = WordDelimiterFilter.CATENATE_NUMBERS;
private static final int CATENATE_WORDS = WordDelimiterFilter.CATENATE_WORDS;
private static final int GENERATE_NUMBER_PARTS = WordDelimiterFilter.GENERATE_NUMBER_PARTS;
private static final int GENERATE_WORD_PARTS = WordDelimiterFilter.GENERATE_WORD_PARTS;
private static final int IGNORE_KEYWORDS = WordDelimiterFilter.IGNORE_KEYWORDS;
private static final int PRESERVE_ORIGINAL = WordDelimiterFilter.PRESERVE_ORIGINAL;
private static final int SPLIT_ON_CASE_CHANGE = WordDelimiterFilter.SPLIT_ON_CASE_CHANGE;
private static final int SPLIT_ON_NUMERICS = WordDelimiterFilter.SPLIT_ON_NUMERICS;
private static final int STEM_ENGLISH_POSSESSIVE = WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE;
private static final byte[] DEFAULT_WORD_DELIM_TABLE = WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE;
/*
public void testPerformance() throws IOException {
String s = "now is the time-for all good men to come to-the aid of their country.";

View File

@@ -120,7 +120,7 @@ public class EdgeNGramTokenizerTest extends BaseTokenStreamTestCase {
}
private static void testNGrams(int minGram, int maxGram, int length, final String nonTokenChars) throws IOException {
final String s = RandomStrings.randomAsciiOfLength(random(), length);
final String s = RandomStrings.randomAsciiLettersOfLengthBetween(random(), length, length);
testNGrams(minGram, maxGram, s, nonTokenChars);
}

View File

@@ -120,7 +120,7 @@ public class NGramTokenizerTest extends BaseTokenStreamTestCase {
}
private static void testNGrams(int minGram, int maxGram, int length, final String nonTokenChars) throws IOException {
final String s = RandomStrings.randomAsciiOfLength(random(), length);
final String s = RandomStrings.randomAsciiLettersOfLengthBetween(random(), length, length);
testNGrams(minGram, maxGram, s, nonTokenChars);
}

View File

@@ -30,6 +30,7 @@ import org.apache.lucene.analysis.en.EnglishAnalyzer;
* Tests parser for the Solr synonyms format
* @lucene.experimental
*/
@Deprecated
public class TestSolrSynonymParser extends BaseSynonymParserTestCase {
/** Tests some simple examples from the solr wiki */

View File

@@ -27,6 +27,7 @@ import org.apache.lucene.analysis.util.StringMockResourceLoader;
import org.apache.lucene.analysis.cjk.CJKAnalyzer;
import org.apache.lucene.util.Version;
@Deprecated
public class TestSynonymFilterFactory extends BaseTokenStreamFactoryTestCase {
/** checks for synonyms of "GB" in synonyms.txt */

View File

@@ -39,6 +39,7 @@ import org.apache.lucene.analysis.tokenattributes.*;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.TestUtil;
@Deprecated
public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
private SynonymMap.Builder b;

View File

@@ -36,7 +36,8 @@ public class TestWordnetSynonymParser extends BaseTokenStreamTestCase {
"s(100000003,2,'baron',n,1,1).\n" +
"s(100000004,1,'king''s evil',n,1,1).\n" +
"s(100000004,2,'king''s meany',n,1,1).\n";
@SuppressWarnings("deprecation")
public void testSynonyms() throws Exception {
Analyzer analyzer = new MockAnalyzer(random());
WordnetSynonymParser parser = new WordnetSynonymParser(true, true, analyzer);

View File

@@ -53,7 +53,7 @@ public class TestFilesystemResourceLoader extends LuceneTestCase {
assertEquals("org.apache.lucene.analysis.util.RollingCharBuffer",
rl.newInstance("org.apache.lucene.analysis.util.RollingCharBuffer", Object.class).getClass().getName());
}
public void testBaseDir() throws Exception {
final Path base = createTempDir("fsResourceLoaderBase");
Writer os = Files.newBufferedWriter(base.resolve("template.txt"), StandardCharsets.UTF_8);
@@ -62,7 +62,8 @@ public class TestFilesystemResourceLoader extends LuceneTestCase {
} finally {
IOUtils.closeWhileHandlingException(os);
}
@SuppressWarnings("deprecation")
ResourceLoader rl = new FilesystemResourceLoader(base);
assertEquals("foobar", WordlistLoader.getLines(rl.openResource("template.txt"), StandardCharsets.UTF_8).get(0));
// Same with full path name:

View File

@@ -73,6 +73,7 @@ public final class ICUTransformFilter extends TokenFilter {
* @param input {@link TokenStream} to filter.
* @param transform Transliterator to transform the text.
*/
@SuppressWarnings("deprecation")
public ICUTransformFilter(TokenStream input, Transliterator transform) {
super(input);
this.transform = transform;

View File

@@ -36,7 +36,7 @@ import org.junit.Before;
/**
* Base class for testing {@link org.apache.lucene.classification.Classifier}s
*/
public abstract class DocumentClassificationTestBase<T> extends ClassificationTestBase {
public abstract class DocumentClassificationTestBase<T> extends ClassificationTestBase<T>{
protected static final BytesRef VIDEOGAME_RESULT = new BytesRef("videogames");
protected static final BytesRef VIDEOGAME_ANALYZED_RESULT = new BytesRef("videogam");

View File

@@ -33,6 +33,7 @@ import org.apache.lucene.util.SuppressForbidden;
@SuppressForbidden(reason = "Uses a Long instance as a marker")
public final class PositiveIntOutputs extends Outputs<Long> {
@SuppressWarnings("deprecation")
private final static Long NO_OUTPUT = new Long(0);
private final static PositiveIntOutputs singleton = new PositiveIntOutputs();

View File

@@ -90,6 +90,7 @@ abstract class Packed64SingleBlock extends PackedInts.MutableImpl {
// bulk get
assert index % valuesPerBlock == 0;
@SuppressWarnings("deprecation")
final PackedInts.Decoder decoder = BulkOperation.of(PackedInts.Format.PACKED_SINGLE_BLOCK, bitsPerValue);
assert decoder.longBlockCount() == 1;
assert decoder.longValueCount() == valuesPerBlock;
@@ -134,6 +135,7 @@ abstract class Packed64SingleBlock extends PackedInts.MutableImpl {
// bulk set
assert index % valuesPerBlock == 0;
@SuppressWarnings("deprecation")
final BulkOperation op = BulkOperation.of(PackedInts.Format.PACKED_SINGLE_BLOCK, bitsPerValue);
assert op.longBlockCount() == 1;
assert op.longValueCount() == valuesPerBlock;
@@ -195,6 +197,7 @@ abstract class Packed64SingleBlock extends PackedInts.MutableImpl {
}
@Override
@SuppressWarnings("deprecation")
protected PackedInts.Format getFormat() {
return PackedInts.Format.PACKED_SINGLE_BLOCK;
}

View File

@@ -61,15 +61,19 @@ public class TestCharArraySet extends LuceneTestCase {
public void testObjectContains() {
CharArraySet set = new CharArraySet(10, true);
Integer val = Integer.valueOf(1);
@SuppressWarnings("deprecation")
Integer val1 = new Integer(1);
// Verify explicitly the case of different Integer instances
assertNotSame(val, val1);
set.add(val);
assertTrue(set.contains(val));
assertTrue(set.contains(new Integer(1))); // another integer
assertTrue(set.contains(val1)); // another integer
assertTrue(set.contains("1"));
assertTrue(set.contains(new char[]{'1'}));
// test unmodifiable
set = CharArraySet.unmodifiableSet(set);
assertTrue(set.contains(val));
assertTrue(set.contains(new Integer(1))); // another integer
assertTrue(set.contains(val1)); // another integer
assertTrue(set.contains("1"));
assertTrue(set.contains(new char[]{'1'}));
}

View File

@@ -214,14 +214,10 @@ public class TestDocument extends LuceneTestCase {
public void testGetValues() {
Document doc = makeDocumentWithFields();
assertEquals(new String[] {"test1", "test2"},
doc.getValues("keyword"));
assertEquals(new String[] {"test1", "test2"},
doc.getValues("text"));
assertEquals(new String[] {"test1", "test2"},
doc.getValues("unindexed"));
assertEquals(new String[0],
doc.getValues("nope"));
assertArrayEquals(new String[] {"test1", "test2"}, doc.getValues("keyword"));
assertArrayEquals(new String[] {"test1", "test2"}, doc.getValues("text"));
assertArrayEquals(new String[] {"test1", "test2"}, doc.getValues("unindexed"));
assertArrayEquals(new String[0], doc.getValues("nope"));
}
public void testPositionIncrementMultiFields() throws Exception {

View File

@@ -28,13 +28,12 @@ import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import junit.framework.Assert;
public class TestMultiTermConstantScore extends BaseTestRangeFilter {
/** threshold for comparing floats */

View File

@@ -295,7 +295,7 @@ public class TestSearchAfter extends LuceneTestCase {
assertEquals(sd1.score, sd2.score, 0f);
if (sd1 instanceof FieldDoc) {
assertTrue(sd2 instanceof FieldDoc);
assertEquals(((FieldDoc) sd1).fields, ((FieldDoc) sd2).fields);
assertArrayEquals(((FieldDoc) sd1).fields, ((FieldDoc) sd2).fields);
}
}
}

View File

@@ -20,6 +20,8 @@ import java.util.Arrays;
public class TestCharsRef extends LuceneTestCase {
@SuppressWarnings("deprecation")
public void testUTF16InUTF8Order() {
final int numStrings = atLeast(1000);
BytesRef utf8[] = new BytesRef[numStrings];

View File

@@ -23,6 +23,7 @@ import java.text.ParseException;
import java.util.Locale;
import java.util.Random;
@SuppressWarnings("deprecation")
public class TestVersion extends LuceneTestCase {
public void testOnOrAfter() throws Exception {

View File

@@ -256,7 +256,9 @@ public class Test2BFST extends LuceneTestCase {
// forward lookup:
assertEquals(output, Util.get(fst, input).longValue());
// reverse lookup:
assertEquals(input, Util.getByOutput(fst, output));
@SuppressWarnings("deprecation")
IntsRef inputResult = Util.getByOutput(fst, output);
assertEquals(input, inputResult);
output += 1 + r.nextInt(10);
nextInput(r, ints);
}

View File

@@ -570,7 +570,7 @@ public class TestFSTs extends LuceneTestCase {
} else {
// Get by output
final Long output = (Long) getOutput(intsRef.get(), ord);
@SuppressWarnings("unchecked") final IntsRef actual = Util.getByOutput((FST<Long>) fst, output.longValue());
@SuppressWarnings({"unchecked", "deprecation"}) final IntsRef actual = Util.getByOutput((FST<Long>) fst, output.longValue());
if (actual == null) {
throw new RuntimeException("unexpected null input from output=" + output);
}
@ -833,13 +833,17 @@ public class TestFSTs extends LuceneTestCase {
assertEquals(b, seekResult.input);
assertEquals(42, (long) seekResult.output);
assertEquals(Util.toIntsRef(new BytesRef("c"), new IntsRefBuilder()),
Util.getByOutput(fst, 13824324872317238L));
assertNull(Util.getByOutput(fst, 47));
assertEquals(Util.toIntsRef(new BytesRef("b"), new IntsRefBuilder()),
Util.getByOutput(fst, 42));
assertEquals(Util.toIntsRef(new BytesRef("a"), new IntsRefBuilder()),
Util.getByOutput(fst, 17));
@SuppressWarnings("deprecation") IntsRef byOutput = Util.getByOutput(fst, 13824324872317238L);
assertEquals(Util.toIntsRef(new BytesRef("c"), new IntsRefBuilder()), byOutput);
@SuppressWarnings("deprecation") IntsRef byOutput47 = Util.getByOutput(fst, 47);
assertNull(byOutput47);
@SuppressWarnings("deprecation") IntsRef byOutput42 = Util.getByOutput(fst, 42);
assertEquals(Util.toIntsRef(new BytesRef("b"), new IntsRefBuilder()), byOutput42);
@SuppressWarnings("deprecation") IntsRef byOutput17 = Util.getByOutput(fst, 17);
assertEquals(Util.toIntsRef(new BytesRef("a"), new IntsRefBuilder()), byOutput17);
}
public void testPrimaryKeys() throws Exception {

View File

@@ -836,7 +836,9 @@ public class TestPackedInts extends LuceneTestCase {
final long[] blocks = new long[blocksOffset + blocksLen];
for (int i = 0; i < blocks.length; ++i) {
blocks[i] = random().nextLong();
if (format == PackedInts.Format.PACKED_SINGLE_BLOCK && 64 % bpv != 0) {
@SuppressWarnings("deprecation")
PackedInts.Format PACKED_SINGLE_BLOCK = PackedInts.Format.PACKED_SINGLE_BLOCK;
if (format == PACKED_SINGLE_BLOCK && 64 % bpv != 0) {
// clear highest bits for packed
final int toClear = 64 % bpv;
blocks[i] = (blocks[i] << toClear) >>> toClear;

View File

@@ -299,7 +299,7 @@ public class DrillSideways {
if (executor != null) { // We have an executor, let use the multi-threaded version
final CollectorManager<TopScoreDocCollector, TopDocs> collectorManager =
new CollectorManager<TopScoreDocCollector, TopDocs>() {
new CollectorManager<>() {
@Override
public TopScoreDocCollector newCollector() throws IOException {
@ -404,6 +404,7 @@ public class DrillSideways {
}
/** Runs a search, using a {@link CollectorManager} to gather and merge search results */
@SuppressWarnings("unchecked")
public <R> ConcurrentDrillSidewaysResult<R> search(final DrillDownQuery query,
final CollectorManager<?, R> hitCollectorManager) throws IOException {

View File

@@ -86,7 +86,7 @@ public class GroupingSearch {
* @param groupFunction The function to group by specified as {@link ValueSource}
* @param valueSourceContext The context of the specified groupFunction
*/
public GroupingSearch(ValueSource groupFunction, Map<?, ?> valueSourceContext) {
public GroupingSearch(ValueSource groupFunction, Map<Object, Object> valueSourceContext) {
this(new ValueSourceGroupSelector(groupFunction, valueSourceContext), null);
}

View File

@@ -35,7 +35,7 @@ import org.apache.lucene.util.mutable.MutableValue;
public class ValueSourceGroupSelector extends GroupSelector<MutableValue> {
private final ValueSource valueSource;
private final Map<?, ?> context;
private final Map<Object, Object> context;
private Set<MutableValue> secondPassGroups;
@@ -44,7 +44,7 @@ public class ValueSourceGroupSelector extends GroupSelector<MutableValue> {
* @param valueSource the ValueSource to group by
* @param context a context map for the ValueSource
*/
public ValueSourceGroupSelector(ValueSource valueSource, Map<?, ?> context) {
public ValueSourceGroupSelector(ValueSource valueSource, Map<Object, Object> context) {
this.valueSource = valueSource;
this.context = context;
}

View File

@@ -310,7 +310,7 @@ public abstract class BaseGroupSelectorTestCase<T> extends AbstractGroupingTestC
// TODO why does SearchGroup.merge() take a list but TopGroups.merge() take an array?
@SuppressWarnings("unchecked")
TopGroups<T>[] shardTopGroups = new TopGroups[shards.length];
TopGroups<T>[] shardTopGroups = (TopGroups<T>[]) new TopGroups<?>[shards.length];
int j = 0;
for (Shard shard : shards) {
TopGroupsCollector<T> sc = new TopGroupsCollector<>(getGroupSelector(), mergedGroups, sort, Sort.RELEVANCE, 5, true);

View File

@@ -287,9 +287,11 @@ public class TestGrouping extends LuceneTestCase {
private Collection<SearchGroup<BytesRef>> getSearchGroups(FirstPassGroupingCollector<?> c, int groupOffset) throws IOException {
if (TermGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
@SuppressWarnings("unchecked")
FirstPassGroupingCollector<BytesRef> collector = (FirstPassGroupingCollector<BytesRef>) c;
return collector.getTopGroups(groupOffset);
} else if (ValueSourceGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
@SuppressWarnings("unchecked")
FirstPassGroupingCollector<MutableValue> collector = (FirstPassGroupingCollector<MutableValue>) c;
Collection<SearchGroup<MutableValue>> mutableValueGroups = collector.getTopGroups(groupOffset);
if (mutableValueGroups == null) {

View File

@@ -30,6 +30,8 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.NumericUtils;
import static org.apache.lucene.search.join.BlockJoinSelector.toIter;
/**
* A special sort field that allows sorting parent docs based on nested / child level fields.
* Based on the sort order it either takes the document with the lowest or highest field value into account.
@@ -118,7 +120,7 @@ public class ToParentBlockJoinSortField extends SortField {
if (children == null) {
return DocValues.emptySorted();
}
return BlockJoinSelector.wrap(sortedSet, type, parents, children);
return BlockJoinSelector.wrap(sortedSet, type, parents, toIter(children));
}
};
@@ -137,7 +139,7 @@ public class ToParentBlockJoinSortField extends SortField {
if (children == null) {
return DocValues.emptyNumeric();
}
return BlockJoinSelector.wrap(sortedNumeric, type, parents, children);
return BlockJoinSelector.wrap(sortedNumeric, type, parents, toIter(children));
}
};
}
@@ -155,7 +157,7 @@ public class ToParentBlockJoinSortField extends SortField {
if (children == null) {
return DocValues.emptyNumeric();
}
return BlockJoinSelector.wrap(sortedNumeric, type, parents, children);
return BlockJoinSelector.wrap(sortedNumeric, type, parents, toIter(children));
}
};
}
@@ -173,7 +175,7 @@ public class ToParentBlockJoinSortField extends SortField {
if (children == null) {
return DocValues.emptyNumeric();
}
return new FilterNumericDocValues(BlockJoinSelector.wrap(sortedNumeric, type, parents, children)) {
return new FilterNumericDocValues(BlockJoinSelector.wrap(sortedNumeric, type, parents, toIter(children))) {
@Override
public long longValue() throws IOException {
// undo the numericutils sortability
@@ -197,7 +199,7 @@ public class ToParentBlockJoinSortField extends SortField {
if (children == null) {
return DocValues.emptyNumeric();
}
return new FilterNumericDocValues(BlockJoinSelector.wrap(sortedNumeric, type, parents, children)) {
return new FilterNumericDocValues(BlockJoinSelector.wrap(sortedNumeric, type, parents, toIter(children))) {
@Override
public long longValue() throws IOException {
// undo the numericutils sortability

View File

@@ -17,6 +17,7 @@
package org.apache.lucene.search.join;
import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
import static org.apache.lucene.search.join.BlockJoinSelector.toIter;
import java.io.IOException;
import java.util.Arrays;
@@ -132,14 +133,14 @@ public class TestBlockJoinSelector extends LuceneTestCase {
ords[12] = 10;
ords[18] = 10;
final SortedDocValues mins = BlockJoinSelector.wrap(DocValues.singleton(new CannedSortedDocValues(ords)), BlockJoinSelector.Type.MIN, parents, children);
final SortedDocValues mins = BlockJoinSelector.wrap(DocValues.singleton(new CannedSortedDocValues(ords)), BlockJoinSelector.Type.MIN, parents, toIter(children));
assertEquals(5, nextDoc(mins,5));
assertEquals(3, mins.ordValue());
assertEquals(15, nextDoc(mins,15));
assertEquals(10, mins.ordValue());
assertNoMoreDoc(mins, 20);
final SortedDocValues maxs = BlockJoinSelector.wrap(DocValues.singleton(new CannedSortedDocValues(ords)), BlockJoinSelector.Type.MAX, parents, children);
final SortedDocValues maxs = BlockJoinSelector.wrap(DocValues.singleton(new CannedSortedDocValues(ords)), BlockJoinSelector.Type.MAX, parents, toIter(children));
assertEquals(5, nextDoc(maxs,5));
assertEquals(7, maxs.ordValue());
assertEquals(15, nextDoc(maxs,15));
@ -246,14 +247,14 @@ public class TestBlockJoinSelector extends LuceneTestCase {
docsWithValue.set(18);
longs[18] = 10;
final NumericDocValues mins = BlockJoinSelector.wrap(DocValues.singleton(new CannedNumericDocValues(longs, docsWithValue)), BlockJoinSelector.Type.MIN, parents, children);
final NumericDocValues mins = BlockJoinSelector.wrap(DocValues.singleton(new CannedNumericDocValues(longs, docsWithValue)), BlockJoinSelector.Type.MIN, parents, toIter(children));
assertEquals(5, nextDoc(mins,5));
assertEquals(3, mins.longValue());
assertEquals(15, nextDoc(mins,15));
assertEquals(10, mins.longValue());
assertNoMoreDoc(mins, 20);
final NumericDocValues maxs = BlockJoinSelector.wrap(DocValues.singleton(new CannedNumericDocValues(longs, docsWithValue)), BlockJoinSelector.Type.MAX, parents, children);
final NumericDocValues maxs = BlockJoinSelector.wrap(DocValues.singleton(new CannedNumericDocValues(longs, docsWithValue)), BlockJoinSelector.Type.MAX, parents, toIter(children));
assertEquals(5, nextDoc(maxs, 5));
assertEquals(7, maxs.longValue());
assertEquals(15, nextDoc(maxs, 15));

View File

@ -82,7 +82,8 @@ public final class UpToTwoPositiveIntOutputs extends Outputs<Object> {
return (int) ((first^(first>>>32)) ^ (second^(second>>32)));
}
}
@SuppressWarnings("deprecation")
private final static Long NO_OUTPUT = new Long(0);
private final boolean doShare;

View File

@@ -23,6 +23,7 @@ import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.BaseSimilarityTestCase;
import org.apache.lucene.search.similarities.Similarity;
@Deprecated
public class TestLegacyBM25Similarity extends BaseSimilarityTestCase {
public void testIllegalK1() {

View File

@@ -56,7 +56,7 @@ public class FunctionQuery extends Query {
protected class FunctionWeight extends Weight {
protected final IndexSearcher searcher;
protected final float boost;
protected final Map context;
protected final Map<Object, Object> context;
public FunctionWeight(IndexSearcher searcher, float boost) throws IOException {
super(FunctionQuery.this);

View File

@ -124,8 +124,7 @@ public class FunctionRangeQuery extends Query {
}
private class FunctionRangeWeight extends Weight {
@SuppressWarnings("rawtypes")
private final Map vsContext;
private final Map<Object, Object> vsContext;
public FunctionRangeWeight(IndexSearcher searcher) throws IOException {
super(FunctionRangeQuery.this);

View File

@ -50,7 +50,7 @@ public abstract class ValueSource {
* docID manner, and you must call this method again to iterate through
* the values again.
*/
public abstract FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException;
public abstract FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException;
@Override
public abstract boolean equals(Object o);
@ -74,14 +74,14 @@ public abstract class ValueSource {
* weight info in the context. The context object will be passed to getValues()
* where this info can be retrieved.
*/
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
}
/**
* Returns a new non-threadsafe context map.
*/
public static Map newContext(IndexSearcher searcher) {
Map context = new IdentityHashMap();
public static Map<Object, Object> newContext(IndexSearcher searcher) {
Map<Object, Object> context = new IdentityHashMap<>();
context.put("searcher", searcher);
return context;
}
@ -119,7 +119,7 @@ public abstract class ValueSource {
@Override
public LongValues getValues(LeafReaderContext ctx, DoubleValues scores) throws IOException {
Map context = new IdentityHashMap<>();
Map<Object, Object> context = new IdentityHashMap<>();
ScoreAndDoc scorer = new ScoreAndDoc();
context.put("scorer", scorer);
final FunctionValues fv = in.getValues(context, ctx);
@ -196,7 +196,7 @@ public abstract class ValueSource {
@Override
public DoubleValues getValues(LeafReaderContext ctx, DoubleValues scores) throws IOException {
Map context = new HashMap<>();
Map<Object, Object> context = new HashMap<>();
ScoreAndDoc scorer = new ScoreAndDoc();
context.put("scorer", scorer);
context.put("searcher", searcher);
@ -236,7 +236,7 @@ public abstract class ValueSource {
@Override
public Explanation explain(LeafReaderContext ctx, int docId, Explanation scoreExplanation) throws IOException {
Map context = new HashMap<>();
Map<Object, Object> context = new HashMap<>();
ScoreAndDoc scorer = new ScoreAndDoc();
scorer.score = scoreExplanation.getValue().floatValue();
context.put("scorer", scorer);
@ -283,7 +283,7 @@ public abstract class ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
Scorable scorer = (Scorable) context.get("scorer");
DoubleValues scores = scorer == null ? null : DoubleValuesSource.fromScorer(scorer);
@ -365,16 +365,16 @@ public abstract class ValueSource {
@Override
public SortField rewrite(IndexSearcher searcher) throws IOException {
Map context = newContext(searcher);
Map<Object, Object> context = newContext(searcher);
createWeight(context, searcher);
return new SortField(getField(), new ValueSourceComparatorSource(context), getReverse());
}
}
class ValueSourceComparatorSource extends FieldComparatorSource {
private final Map context;
private final Map<Object, Object> context;
public ValueSourceComparatorSource(Map context) {
public ValueSourceComparatorSource(Map<Object, Object> context) {
this.context = context;
}
@ -394,10 +394,10 @@ public abstract class ValueSource {
private final double[] values;
private FunctionValues docVals;
private double bottom;
private final Map fcontext;
private final Map<Object, Object> fcontext;
private double topValue;
ValueSourceComparator(Map fcontext, int numHits) {
ValueSourceComparator(Map<Object, Object> fcontext, int numHits) {
this.fcontext = fcontext;
values = new double[numHits];
}
@ -429,7 +429,7 @@ public abstract class ValueSource {
@Override
public void setTopValue(final Double value) {
this.topValue = value.doubleValue();
this.topValue = value;
}
@Override

View File

@ -41,7 +41,7 @@ public class BytesRefFieldSource extends FieldCacheSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FieldInfo fieldInfo = readerContext.reader().getFieldInfos().fieldInfo(field);
// To be sorted or not to be sorted, that is the question

View File

@ -50,7 +50,7 @@ public abstract class ComparisonBoolFunction extends BoolFunction {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues lhsVal = this.lhs.getValues(context, readerContext);
final FunctionValues rhsVal = this.rhs.getValues(context, readerContext);
final String compLabel = this.name();
@ -97,7 +97,7 @@ public abstract class ComparisonBoolFunction extends BoolFunction {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
lhs.createWeight(context, searcher);
rhs.createWeight(context, searcher);
}

View File

@ -41,7 +41,7 @@ public class ConstValueSource extends ConstNumberSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return new FloatDocValues(this) {
@Override
public float floatVal(int doc) {

View File

@ -42,7 +42,7 @@ public class DefFunction extends MultiFunction {
@Override
public FunctionValues getValues(Map fcontext, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> fcontext, LeafReaderContext readerContext) throws IOException {
return new Values(valsArr(sources, fcontext, readerContext)) {

View File

@ -57,15 +57,15 @@ public class DocFreqValueSource extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
IndexSearcher searcher = (IndexSearcher)context.get("searcher");
int docfreq = searcher.getIndexReader().docFreq(new Term(indexedField, indexedBytes));
return new ConstIntDocValues(docfreq, this);
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
context.put("searcher",searcher);
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
context.put("searcher", searcher);
}
@Override

View File

@ -43,7 +43,7 @@ public class DoubleConstValueSource extends ConstNumberSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return new DoubleDocValues(this) {
@Override
public float floatVal(int doc) {

View File

@ -50,7 +50,7 @@ public class DoubleFieldSource extends FieldCacheSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final NumericDocValues values = getNumericDocValues(context, readerContext);
@ -104,7 +104,7 @@ public class DoubleFieldSource extends FieldCacheSource {
};
}
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return DocValues.getNumeric(readerContext.reader(), field);
}

View File

@ -51,7 +51,7 @@ public abstract class DualFloatFunction extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues aVals = a.getValues(context, readerContext);
final FunctionValues bVals = b.getValues(context, readerContext);
return new FloatDocValues(this) {
@ -75,7 +75,7 @@ public abstract class DualFloatFunction extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
a.createWeight(context,searcher);
b.createWeight(context,searcher);
}

View File

@ -94,7 +94,7 @@ public class EnumFieldSource extends FieldCacheSource {
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final NumericDocValues arr = DocValues.getNumeric(readerContext.reader(), field);
return new IntDocValues(this) {

View File

@ -50,7 +50,7 @@ public class FloatFieldSource extends FieldCacheSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final NumericDocValues arr = getNumericDocValues(context, readerContext);
@ -105,7 +105,7 @@ public class FloatFieldSource extends FieldCacheSource {
};
}
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return DocValues.getNumeric(readerContext.reader(), field);
}

View File

@ -45,7 +45,7 @@ public class IDFValueSource extends DocFreqValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
IndexSearcher searcher = (IndexSearcher)context.get("searcher");
TFIDFSimilarity sim = asTFIDF(searcher.getSimilarity(), field);
if (sim == null) {

View File

@ -43,7 +43,7 @@ public class IfFunction extends BoolFunction {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues ifVals = ifSource.getValues(context, readerContext);
final FunctionValues trueVals = trueSource.getValues(context, readerContext);
final FunctionValues falseVals = falseSource.getValues(context, readerContext);
@ -142,7 +142,7 @@ public class IfFunction extends BoolFunction {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
ifSource.createWeight(context, searcher);
trueSource.createWeight(context, searcher);
falseSource.createWeight(context, searcher);

View File

@ -50,7 +50,7 @@ public class IntFieldSource extends FieldCacheSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final NumericDocValues arr = getNumericDocValues(context, readerContext);
@ -109,7 +109,7 @@ public class IntFieldSource extends FieldCacheSource {
};
}
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return DocValues.getNumeric(readerContext.reader(), field);
}

View File

@ -53,7 +53,7 @@ public class JoinDocFreqValueSource extends FieldCacheSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException
{
final BinaryDocValues terms = DocValues.getBinary(readerContext.reader(), field);
final IndexReader top = ReaderUtil.getTopLevelContext(readerContext).reader();

View File

@ -50,7 +50,7 @@ public class LinearFloatFunction extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues vals = source.getValues(context, readerContext);
return new FloatDocValues(this) {
@Override
@ -69,7 +69,7 @@ public class LinearFloatFunction extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
source.createWeight(context, searcher);
}

View File

@ -45,7 +45,7 @@ public class LiteralValueSource extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return new StrDocValues(this) {
@Override

View File

@ -62,7 +62,7 @@ public class LongFieldSource extends FieldCacheSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final NumericDocValues arr = getNumericDocValues(context, readerContext);
@ -142,7 +142,7 @@ public class LongFieldSource extends FieldCacheSource {
};
}
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return DocValues.getNumeric(readerContext.reader(), field);
}

View File

@ -41,12 +41,12 @@ public class MaxDocValueSource extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
context.put("searcher",searcher);
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
context.put("searcher", searcher);
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
IndexSearcher searcher = (IndexSearcher)context.get("searcher");
return new DocFreqValueSource.ConstIntDocValues(searcher.getIndexReader().maxDoc(), this);
}

View File

@ -42,7 +42,7 @@ public abstract class MultiBoolFunction extends BoolFunction {
protected abstract boolean func(int doc, FunctionValues[] vals) throws IOException;
@Override
public BoolDocValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public BoolDocValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues[] vals = new FunctionValues[sources.size()];
int i=0;
for (ValueSource source : sources) {
@ -104,7 +104,7 @@ public abstract class MultiBoolFunction extends BoolFunction {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
for (ValueSource source : sources) {
source.createWeight(context, searcher);
}

View File

@ -70,7 +70,7 @@ public abstract class MultiFloatFunction extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues[] valsArr = new FunctionValues[sources.length];
for (int i=0; i<sources.length; i++) {
valsArr[i] = sources[i].getValues(context, readerContext);
@ -92,7 +92,7 @@ public abstract class MultiFloatFunction extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
for (ValueSource source : sources)
source.createWeight(context, searcher);
}

View File

@ -112,7 +112,7 @@ public abstract class MultiFunction extends ValueSource {
return sb.toString();
}
public static FunctionValues[] valsArr(List<ValueSource> sources, Map fcontext, LeafReaderContext readerContext) throws IOException {
public static FunctionValues[] valsArr(List<ValueSource> sources, Map<Object, Object> fcontext, LeafReaderContext readerContext) throws IOException {
final FunctionValues[] valsArr = new FunctionValues[sources.size()];
int i=0;
for (ValueSource source : sources) {
@ -157,7 +157,7 @@ public abstract class MultiFunction extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
for (ValueSource source : sources)
source.createWeight(context, searcher);
}

View File

@ -55,7 +55,7 @@ public class MultiValuedDoubleFieldSource extends DoubleFieldSource {
}
@Override
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
SortedNumericDocValues sortedDv = DocValues.getSortedNumeric(readerContext.reader(), field);
return SortedNumericSelector.wrap(sortedDv, selector, SortField.Type.DOUBLE);
}

View File

@ -55,7 +55,7 @@ public class MultiValuedFloatFieldSource extends FloatFieldSource {
}
@Override
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
SortedNumericDocValues sortedDv = DocValues.getSortedNumeric(readerContext.reader(), field);
return SortedNumericSelector.wrap(sortedDv, selector, SortField.Type.FLOAT);
}

View File

@ -55,7 +55,7 @@ public class MultiValuedIntFieldSource extends IntFieldSource {
}
@Override
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
SortedNumericDocValues sortedDv = DocValues.getSortedNumeric(readerContext.reader(), field);
return SortedNumericSelector.wrap(sortedDv, selector, SortField.Type.INT);
}

View File

@ -55,7 +55,7 @@ public class MultiValuedLongFieldSource extends LongFieldSource {
}
@Override
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
protected NumericDocValues getNumericDocValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
SortedNumericDocValues sortedDv = DocValues.getSortedNumeric(readerContext.reader(), field);
return SortedNumericSelector.wrap(sortedDv, selector, SortField.Type.LONG);
}

View File

@ -55,12 +55,12 @@ public class NormValueSource extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
context.put("searcher",searcher);
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
IndexSearcher searcher = (IndexSearcher)context.get("searcher");
final TFIDFSimilarity similarity = IDFValueSource.asTFIDF(searcher.getSimilarity(), field);
if (similarity == null) {

View File

@ -41,7 +41,7 @@ public class NumDocsValueSource extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
// Searcher has no numdocs so we must use the reader instead
return new DocFreqValueSource.ConstIntDocValues(ReaderUtil.getTopLevelContext(readerContext).reader().numDocs(), this);
}

View File

@ -55,7 +55,7 @@ public class QueryValueSource extends ValueSource {
}
@Override
public FunctionValues getValues(Map fcontext, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> fcontext, LeafReaderContext readerContext) throws IOException {
return new QueryDocValues(this, readerContext, fcontext);
}
@ -72,7 +72,7 @@ public class QueryValueSource extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
Query rewritten = searcher.rewrite(q);
Weight w = searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1);
context.put(this, w);
@ -84,7 +84,7 @@ class QueryDocValues extends FloatDocValues {
final LeafReaderContext readerContext;
final Weight weight;
final float defVal;
final Map fcontext;
final Map<Object, Object> fcontext;
final Query q;
Scorer scorer;
@ -96,7 +96,7 @@ class QueryDocValues extends FloatDocValues {
int lastDocRequested=-1;
public QueryDocValues(QueryValueSource vs, LeafReaderContext readerContext, Map fcontext) throws IOException {
public QueryDocValues(QueryValueSource vs, LeafReaderContext readerContext, Map<Object, Object> fcontext) throws IOException {
super(vs);
this.readerContext = readerContext;

View File

@ -58,7 +58,7 @@ public class RangeMapFloatFunction extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues vals = source.getValues(context, readerContext);
final FunctionValues targets = target.getValues(context, readerContext);
final FunctionValues defaults = (this.defaultVal == null) ? null : defaultVal.getValues(context, readerContext);
@ -76,7 +76,7 @@ public class RangeMapFloatFunction extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
source.createWeight(context, searcher);
}

View File

@ -60,7 +60,7 @@ public class ReciprocalFloatFunction extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues vals = source.getValues(context, readerContext);
return new FloatDocValues(this) {
@Override
@ -81,7 +81,7 @@ public class ReciprocalFloatFunction extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
source.createWeight(context, searcher);
}

View File

@ -59,7 +59,7 @@ public class ScaleFloatFunction extends ValueSource {
float maxVal;
}
private ScaleInfo createScaleInfo(Map context, LeafReaderContext readerContext) throws IOException {
private ScaleInfo createScaleInfo(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final List<LeafReaderContext> leaves = ReaderUtil.getTopLevelContext(readerContext).leaves();
float minVal = Float.POSITIVE_INFINITY;
@ -100,7 +100,7 @@ public class ScaleFloatFunction extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
ScaleInfo scaleInfo = (ScaleInfo)context.get(ScaleFloatFunction.this);
if (scaleInfo == null) {
@ -133,7 +133,7 @@ public class ScaleFloatFunction extends ValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
source.createWeight(context, searcher);
}

View File

@ -44,7 +44,7 @@ public abstract class SimpleBoolFunction extends BoolFunction {
protected abstract boolean func(int doc, FunctionValues vals) throws IOException;
@Override
public BoolDocValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public BoolDocValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues vals = source.getValues(context, readerContext);
return new BoolDocValues(this) {
@Override
@ -76,7 +76,7 @@ public abstract class SimpleBoolFunction extends BoolFunction {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
source.createWeight(context, searcher);
}
}

View File

@ -34,7 +34,7 @@ import java.util.Map;
protected abstract float func(int doc, FunctionValues vals) throws IOException;
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final FunctionValues vals = source.getValues(context, readerContext);
return new FloatDocValues(this) {
@Override

View File

@ -52,7 +52,7 @@ import java.util.Map;
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
source.createWeight(context, searcher);
}
}

View File

@ -55,7 +55,7 @@ public class SortedSetFieldSource extends FieldCacheSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
SortedSetDocValues sortedSet = DocValues.getSortedSet(readerContext.reader(), field);
SortedDocValues view = SortedSetSelector.wrap(sortedSet, selector);
return new DocTermsIndexDocValues(this, view) {

View File

@ -48,12 +48,12 @@ public class SumTotalTermFreqValueSource extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return (FunctionValues)context.get(this);
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
long sumTotalTermFreq = 0;
for (LeafReaderContext readerContext : searcher.getTopReaderContext().leaves()) {
Terms terms = readerContext.reader().terms(indexedField);

View File

@ -48,7 +48,7 @@ public class TFValueSource extends TermFreqValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final Terms terms = readerContext.reader().terms(indexedField);
IndexSearcher searcher = (IndexSearcher)context.get("searcher");
final TFIDFSimilarity similarity = IDFValueSource.asTFIDF(searcher.getSimilarity(), indexedField);

View File

@ -46,7 +46,7 @@ public class TermFreqValueSource extends DocFreqValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
final Terms terms = readerContext.reader().terms(indexedField);
return new IntDocValues(this) {

View File

@ -55,12 +55,12 @@ public class TotalTermFreqValueSource extends ValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
return (FunctionValues)context.get(this);
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
long totalTermFreq = 0;
for (LeafReaderContext readerContext : searcher.getTopReaderContext().leaves()) {
long val = readerContext.reader().totalTermFreq(new Term(indexedField, indexedBytes));

View File

@ -52,7 +52,7 @@ public class VectorValueSource extends MultiValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) throws IOException {
int size = sources.size();
// special-case x,y and lat,lon since it's so common
@ -178,7 +178,7 @@ public class VectorValueSource extends MultiValueSource {
}
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
public void createWeight(Map<Object, Object> context, IndexSearcher searcher) throws IOException {
for (ValueSource source : sources)
source.createWeight(context, searcher);
}

View File

@ -163,6 +163,7 @@ final class IntervalMatches {
case ITERATING:
case NO_MORE_INTERVALS:
state = State.EXHAUSTED;
break;
case EXHAUSTED:
}
return NO_MORE_DOCS;

View File

@ -565,7 +565,7 @@ public class TestValueSources extends LuceneTestCase {
// actual doc / index is not relevant for this test
final LeafReaderContext leaf = searcher.getIndexReader().leaves().get(0);
final Map context = ValueSource.newContext(searcher);
final Map<Object, Object> context = ValueSource.newContext(searcher);
ALL_EXIST_VS.createWeight(context, searcher);
NONE_EXIST_VS.createWeight(context, searcher);
@ -643,12 +643,12 @@ public class TestValueSources extends LuceneTestCase {
assertExists(NONE_EXIST_VS, vs);
}
/**
* Asserts that for every doc, the {@link FunctionValues#exists} value from the
* <code>actual</code> {@link ValueSource} matches the {@link FunctionValues#exists}
* Asserts that for every doc, the {@link FunctionValues#exists} value from the
* <code>actual</code> {@link ValueSource} matches the {@link FunctionValues#exists}
* value from the <code>expected</code> {@link ValueSource}
*/
void assertExists(ValueSource expected, ValueSource actual) {
Map context = ValueSource.newContext(searcher);
Map<Object, Object> context = ValueSource.newContext(searcher);
try {
expected.createWeight(context, searcher);
actual.createWeight(context, searcher);
@ -712,7 +712,7 @@ public class TestValueSources extends LuceneTestCase {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) {
public FunctionValues getValues(Map<Object, Object> context, LeafReaderContext readerContext) {
return new FloatDocValues(this) {
@Override
public float floatVal(int doc) {

View File

@ -220,6 +220,7 @@ public class SimpleServer extends LuceneTestCase {
return new CopyState(files, version, gen, infosBytes, completedMergeFiles, primaryGen, null);
}
@SuppressWarnings("try")
public void test() throws Exception {
int id = Integer.parseInt(System.getProperty("tests.nrtreplication.nodeid"));

View File

@ -858,7 +858,7 @@ public class PhraseWildcardQuery extends Query {
@SuppressWarnings("unchecked")
protected void setTermStatesForSegment(LeafReaderContext leafReaderContext, List<TermBytesTermState> termStates) {
if (termStatesPerSegment == null) {
termStatesPerSegment = (List<TermBytesTermState>[]) new List[numSegments];
termStatesPerSegment = (List<TermBytesTermState>[]) new List<?>[numSegments];
termsData.numTermsMatching++;
}
termStatesPerSegment[leafReaderContext.ord] = termStates;

View File

@ -136,7 +136,7 @@ public abstract class SpatialStrategy {
public final DoubleValuesSource makeRecipDistanceValueSource(Shape queryShape) {
Rectangle bbox = queryShape.getBoundingBox();
double diagonalDist = ctx.getDistCalc().distance(
ctx.makePoint(bbox.getMinX(), bbox.getMinY()), bbox.getMaxX(), bbox.getMaxY());
ctx.getShapeFactory().pointXY(bbox.getMinX(), bbox.getMinY()), bbox.getMaxX(), bbox.getMaxY());
double distToEdge = diagonalDist * 0.5;
float c = (float)distToEdge * 0.1f;//one tenth
DoubleValuesSource distance = makeDistanceValueSource(queryShape.getCenter(), 1.0);

View File

@ -54,7 +54,7 @@ class BBoxValueSource extends ShapeValuesSource {
final NumericDocValues maxY = DocValues.getNumeric(reader, strategy.field_maxY);
//reused
final Rectangle rect = strategy.getSpatialContext().makeRectangle(0,0,0,0);
final Rectangle rect = strategy.getSpatialContext().getShapeFactory().rect(0,0,0,0);
return new ShapeValues() {

View File

@ -100,7 +100,7 @@ public class HeatmapFacetCounter {
//First get the rect of the cell at the bottom-left at depth facetLevel
final SpatialPrefixTree grid = strategy.getGrid();
final SpatialContext ctx = grid.getSpatialContext();
final Point cornerPt = ctx.makePoint(inputRect.getMinX(), inputRect.getMinY());
final Point cornerPt = ctx.getShapeFactory().pointXY(inputRect.getMinX(), inputRect.getMinY());
final CellIterator cellIterator = grid.getTreeCellIterator(cornerPt, facetLevel);
Cell cornerCell = null;
while (cellIterator.hasNext()) {
@ -141,7 +141,7 @@ public class HeatmapFacetCounter {
heatMaxY = worldRect.getMaxY();
}
final Heatmap heatmap = new Heatmap(columns, rows, ctx.makeRectangle(heatMinX, heatMaxX, heatMinY, heatMaxY));
final Heatmap heatmap = new Heatmap(columns, rows, ctx.getShapeFactory().rect(heatMinX, heatMaxX, heatMinY, heatMaxY));
if (topAcceptDocs instanceof Bits.MatchNoBits) {
return heatmap; // short-circuit
}

View File

@ -107,13 +107,13 @@ public class WithinPrefixTreeQuery extends AbstractVisitingPrefixTreeQuery {
throw new IllegalArgumentException("distErr must be > 0");
SpatialContext ctx = grid.getSpatialContext();
if (shape instanceof Point) {
return ctx.makeCircle((Point)shape, distErr);
return ctx.getShapeFactory().circle((Point)shape, distErr);
} else if (shape instanceof Circle) {
Circle circle = (Circle) shape;
double newDist = circle.getRadius() + distErr;
if (ctx.isGeo() && newDist > 180)
newDist = 180;
return ctx.makeCircle(circle.getCenter(), newDist);
return ctx.getShapeFactory().circle(circle.getCenter(), newDist);
} else {
Rectangle bbox = shape.getBoundingBox();
double newMinX = bbox.getMinX() - distErr;
@ -139,7 +139,7 @@ public class WithinPrefixTreeQuery extends AbstractVisitingPrefixTreeQuery {
newMinY = Math.max(newMinY, ctx.getWorldBounds().getMinY());
newMaxY = Math.min(newMaxY, ctx.getWorldBounds().getMaxY());
}
return ctx.makeRectangle(newMinX, newMaxX, newMinY, newMaxY);
return ctx.getShapeFactory().rect(newMinX, newMaxX, newMinY, newMaxY);
}
}

View File

@ -62,7 +62,9 @@ public class PackedQuadPrefixTree extends QuadPrefixTree {
@Override
protected SpatialPrefixTree newSPT() {
PackedQuadPrefixTree tree = new PackedQuadPrefixTree(ctx, maxLevels != null ? maxLevels : MAX_LEVELS_POSSIBLE);
tree.robust = getVersion().onOrAfter(Version.LUCENE_8_3_0);
@SuppressWarnings("deprecation")
Version lucene830 = Version.LUCENE_8_3_0;
tree.robust = getVersion().onOrAfter(lucene830);
return tree;
}
}
@ -88,7 +90,7 @@ public class PackedQuadPrefixTree extends QuadPrefixTree {
public Cell getCell(Point p, int level) {
if (!robust) { // old method
List<Cell> cells = new ArrayList<>(1);
buildNotRobustly(xmid, ymid, 0, cells, 0x0L, ctx.makePoint(p.getX(), p.getY()), level);
buildNotRobustly(xmid, ymid, 0, cells, 0x0L, ctx.getShapeFactory().pointXY(p.getX(), p.getY()), level);
if (!cells.isEmpty()) {
return cells.get(0);//note cells could be longer if p on edge
}
@ -152,7 +154,7 @@ public class PackedQuadPrefixTree extends QuadPrefixTree {
double w = levelW[level] / 2;
double h = levelH[level] / 2;
SpatialRelation v = shape.relate(ctx.makeRectangle(cx - w, cx + w, cy - h, cy + h));
SpatialRelation v = shape.relate(ctx.getShapeFactory().rect(cx - w, cx + w, cy - h, cy + h));
if (SpatialRelation.DISJOINT == v) {
return;

View File

@ -53,7 +53,9 @@ public class QuadPrefixTree extends LegacyPrefixTree {
protected SpatialPrefixTree newSPT() {
QuadPrefixTree tree = new QuadPrefixTree(ctx,
maxLevels != null ? maxLevels : MAX_LEVELS_POSSIBLE);
tree.robust = getVersion().onOrAfter(Version.LUCENE_8_3_0);
@SuppressWarnings("deprecation")
Version LUCENE_8_3_0 = Version.LUCENE_8_3_0;
tree.robust = getVersion().onOrAfter(LUCENE_8_3_0);
return tree;
}
}
@ -142,7 +144,7 @@ public class QuadPrefixTree extends LegacyPrefixTree {
public Cell getCell(Point p, int level) {
if (!robust) { // old method
List<Cell> cells = new ArrayList<>(1);
buildNotRobustly(xmid, ymid, 0, cells, new BytesRef(maxLevels+1), ctx.makePoint(p.getX(),p.getY()), level);
buildNotRobustly(xmid, ymid, 0, cells, new BytesRef(maxLevels+1), ctx.getShapeFactory().pointXY(p.getX(),p.getY()), level);
if (!cells.isEmpty()) {
return cells.get(0);//note cells could be longer if p on edge
}
@ -223,7 +225,7 @@ public class QuadPrefixTree extends LegacyPrefixTree {
double h = levelH[level] / 2;
int strlen = str.length;
Rectangle rectangle = ctx.makeRectangle(cx - w, cx + w, cy - h, cy + h);
Rectangle rectangle = ctx.getShapeFactory().rect(cx - w, cx + w, cy - h, cy + h);
SpatialRelation v = shape.relate(rectangle);
if (SpatialRelation.CONTAINS == v) {
str.bytes[str.length++] = (byte)c;//append
@ -354,7 +356,7 @@ public class QuadPrefixTree extends LegacyPrefixTree {
width = gridW;
height = gridH;
}
return ctx.makeRectangle(xmin, xmin + width, ymin, ymin + height);
return ctx.getShapeFactory().rect(xmin, xmin + width, ymin, ymin + height);
}
}//QuadCell
}

View File

@ -197,6 +197,7 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
}
@Override
@SuppressWarnings("deprecation")
public Shape lineString(List<Point> list, double distance) {
LineStringBuilder builder = lineString();
for (Point point : list) {
@ -207,6 +208,7 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
}
@Override
@SuppressWarnings("deprecation")
public <S extends Shape> ShapeCollection<S> multiShape(List<S> list) {
throw new UnsupportedOperationException();
}

View File

@ -35,6 +35,7 @@ import org.junit.Test;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.ShapeFactory;
public class DistanceStrategyTest extends StrategyTestCase {
@ParametersFactory(argumentFormatting = "strategy=%s")
@ -81,20 +82,21 @@ public class DistanceStrategyTest extends StrategyTestCase {
@Test
public void testDistanceOrder() throws IOException {
adoc("100", ctx.makePoint(2, 1));
adoc("101", ctx.makePoint(-1, 4));
ShapeFactory shapeFactory = ctx.getShapeFactory();
adoc("100", shapeFactory.pointXY(2, 1));
adoc("101", shapeFactory.pointXY(-1, 4));
adoc("103", (Shape)null);//test score for nothing
commit();
//FYI distances are in docid order
checkDistValueSource(ctx.makePoint(4, 3), 2.8274937f, 5.0898066f, 180f);
checkDistValueSource(ctx.makePoint(0, 4), 3.6043684f, 0.9975641f, 180f);
checkDistValueSource(shapeFactory.pointXY(4, 3), 2.8274937f, 5.0898066f, 180f);
checkDistValueSource(shapeFactory.pointXY(0, 4), 3.6043684f, 0.9975641f, 180f);
}
@Test
public void testRecipScore() throws IOException {
Point p100 = ctx.makePoint(2.02, 0.98);
Point p100 = ctx.getShapeFactory().pointXY(2.02, 0.98);
adoc("100", p100);
Point p101 = ctx.makePoint(-1.001, 4.001);
Point p101 = ctx.getShapeFactory().pointXY(-1.001, 4.001);
adoc("101", p101);
adoc("103", (Shape)null);//test score for nothing
commit();

View File

@ -36,12 +36,15 @@ import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.distance.DistanceUtils;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.ShapeFactory;
/**
* Based off of Solr 3's SpatialFilterTest.
*/
public class PortedSolr3Test extends StrategyTestCase {
private ShapeFactory shapeFactory;
@ParametersFactory(argumentFormatting = "strategy=%s")
public static Iterable<Object[]> parameters() {
List<Object[]> ctorArgs = new ArrayList<>();
@ -74,23 +77,24 @@ public class PortedSolr3Test extends StrategyTestCase {
public PortedSolr3Test(String suiteName, SpatialStrategy strategy) {
this.ctx = strategy.getSpatialContext();
this.strategy = strategy;
shapeFactory = ctx.getShapeFactory();
}
private void setupDocs() throws Exception {
super.deleteAll();
adoc("1", ctx.makePoint(-79.9289094, 32.7693246));
adoc("2", ctx.makePoint(-80.9289094, 33.7693246));
adoc("3", ctx.makePoint(50.9289094, -32.7693246));
adoc("4", ctx.makePoint(60.9289094, -50.7693246));
adoc("5", ctx.makePoint(0, 0));
adoc("6", ctx.makePoint(0.1, 0.1));
adoc("7", ctx.makePoint(-0.1, -0.1));
adoc("8", ctx.makePoint(179.9, 0));
adoc("9", ctx.makePoint(-179.9, 0));
adoc("10", ctx.makePoint(50, 89.9));
adoc("11", ctx.makePoint(-130, 89.9));
adoc("12", ctx.makePoint(50, -89.9));
adoc("13", ctx.makePoint(-130, -89.9));
adoc("1", shapeFactory.pointXY(-79.9289094, 32.7693246));
adoc("2", shapeFactory.pointXY(-80.9289094, 33.7693246));
adoc("3", shapeFactory.pointXY(50.9289094, -32.7693246));
adoc("4", shapeFactory.pointXY(60.9289094, -50.7693246));
adoc("5", shapeFactory.pointXY(0, 0));
adoc("6", shapeFactory.pointXY(0.1, 0.1));
adoc("7", shapeFactory.pointXY(-0.1, -0.1));
adoc("8", shapeFactory.pointXY(179.9, 0));
adoc("9", shapeFactory.pointXY(-179.9, 0));
adoc("10", shapeFactory.pointXY(50, 89.9));
adoc("11", shapeFactory.pointXY(-130, 89.9));
adoc("12", shapeFactory.pointXY(50, -89.9));
adoc("13", shapeFactory.pointXY(-130, -89.9));
commit();
}
@ -100,39 +104,39 @@ public class PortedSolr3Test extends StrategyTestCase {
setupDocs();
//Try some edge cases
//NOTE: 2nd arg is distance in kilometers
checkHitsCircle(ctx.makePoint(1, 1), 175, 3, 5, 6, 7);
checkHitsCircle(ctx.makePoint(179.8, 0), 200, 2, 8, 9);
checkHitsCircle(ctx.makePoint(50, 89.8), 200, 2, 10, 11);//this goes over the north pole
checkHitsCircle(ctx.makePoint(50, -89.8), 200, 2, 12, 13);//this goes over the south pole
checkHitsCircle(shapeFactory.pointXY(1, 1), 175, 3, 5, 6, 7);
checkHitsCircle(shapeFactory.pointXY(179.8, 0), 200, 2, 8, 9);
checkHitsCircle(shapeFactory.pointXY(50, 89.8), 200, 2, 10, 11);//this goes over the north pole
checkHitsCircle(shapeFactory.pointXY(50, -89.8), 200, 2, 12, 13);//this goes over the south pole
//try some normal cases
checkHitsCircle(ctx.makePoint(-80.0, 33.0), 300, 2);
checkHitsCircle(shapeFactory.pointXY(-80.0, 33.0), 300, 2);
//large distance
checkHitsCircle(ctx.makePoint(1, 1), 5000, 3, 5, 6, 7);
checkHitsCircle(shapeFactory.pointXY(1, 1), 5000, 3, 5, 6, 7);
//Because we are generating a box based on the west/east longitudes and the south/north latitudes, which then
//translates to a range query, which is slightly more inclusive. Thus, even though 0.0 is 15.725 kms away,
//it will be included, b/zScaling of the box calculation.
checkHitsBBox(ctx.makePoint(0.1, 0.1), 15, 2, 5, 6);
checkHitsBBox(shapeFactory.pointXY(0.1, 0.1), 15, 2, 5, 6);
//try some more
deleteAll();
adoc("14", ctx.makePoint(5, 0));
adoc("15", ctx.makePoint(15, 0));
adoc("14", shapeFactory.pointXY(5, 0));
adoc("15", shapeFactory.pointXY(15, 0));
//3000KM from 0,0, see http://www.movable-type.co.uk/scripts/latlong.html
adoc("16", ctx.makePoint(19.79750, 18.71111));
adoc("17", ctx.makePoint(-95.436643, 44.043900));
adoc("16", shapeFactory.pointXY(19.79750, 18.71111));
adoc("17", shapeFactory.pointXY(-95.436643, 44.043900));
commit();
checkHitsCircle(ctx.makePoint(0, 0), 1000, 1, 14);
checkHitsCircle(ctx.makePoint(0, 0), 2000, 2, 14, 15);
checkHitsBBox(ctx.makePoint(0, 0), 3000, 3, 14, 15, 16);
checkHitsCircle(ctx.makePoint(0, 0), 3001, 3, 14, 15, 16);
checkHitsCircle(ctx.makePoint(0, 0), 3000.1, 3, 14, 15, 16);
checkHitsCircle(shapeFactory.pointXY(0, 0), 1000, 1, 14);
checkHitsCircle(shapeFactory.pointXY(0, 0), 2000, 2, 14, 15);
checkHitsBBox(shapeFactory.pointXY(0, 0), 3000, 3, 14, 15, 16);
checkHitsCircle(shapeFactory.pointXY(0, 0), 3001, 3, 14, 15, 16);
checkHitsCircle(shapeFactory.pointXY(0, 0), 3000.1, 3, 14, 15, 16);
//really fine grained distance and reflects some of the vagaries of how we are calculating the box
checkHitsCircle(ctx.makePoint(-96.789603, 43.517030), 109, 0);
checkHitsCircle(shapeFactory.pointXY(-96.789603, 43.517030), 109, 0);
// falls outside of the real distance, but inside the bounding box
checkHitsCircle(ctx.makePoint(-96.789603, 43.517030), 110, 0);
checkHitsBBox(ctx.makePoint(-96.789603, 43.517030), 110, 1, 17);
checkHitsCircle(shapeFactory.pointXY(-96.789603, 43.517030), 110, 0);
checkHitsBBox(shapeFactory.pointXY(-96.789603, 43.517030), 110, 1, 17);
}
//---- these are similar to Solr test methods
@ -147,7 +151,7 @@ public class PortedSolr3Test extends StrategyTestCase {
private void _checkHits(boolean bbox, Point pt, double distKM, int assertNumFound, int... assertIds) {
SpatialOperation op = SpatialOperation.Intersects;
double distDEG = DistanceUtils.dist2Degrees(distKM, DistanceUtils.EARTH_MEAN_RADIUS_KM);
Shape shape = ctx.makeCircle(pt, distDEG);
Shape shape = shapeFactory.circle(pt, distDEG);
if (bbox)
shape = shape.getBoundingBox();

View File

@ -103,12 +103,12 @@ public class QueryEqualsHashCodeTest extends LuceneTestCase {
}
private SpatialArgs makeArgs1() {
final Shape shape1 = ctx.makeRectangle(0, 0, 10, 10);
final Shape shape1 = ctx.getShapeFactory().rect(0, 0, 10, 10);
return new SpatialArgs(predicate, shape1);
}
private SpatialArgs makeArgs2() {
final Shape shape2 = ctx.makeRectangle(0, 0, 20, 20);
final Shape shape2 = ctx.getShapeFactory().rect(0, 0, 20, 20);
return new SpatialArgs(predicate, shape2);
}

View File

@ -108,14 +108,14 @@ public class SpatialExample extends LuceneTestCase {
//Spatial4j is x-y order for arguments
indexWriter.addDocument(newSampleDocument(
2, ctx.makePoint(-80.93, 33.77)));
2, ctx.getShapeFactory().pointXY(-80.93, 33.77)));
//Spatial4j has a WKT parser which is also "x y" order
indexWriter.addDocument(newSampleDocument(
4, ctx.readShapeFromWkt("POINT(60.9289094 -50.7693246)")));
indexWriter.addDocument(newSampleDocument(
20, ctx.makePoint(0.1,0.1), ctx.makePoint(0, 0)));
20, ctx.getShapeFactory().pointXY(0.1,0.1), ctx.getShapeFactory().pointXY(0, 0)));
indexWriter.close();
}
@ -149,7 +149,7 @@ public class SpatialExample extends LuceneTestCase {
//Search with circle
//note: SpatialArgs can be parsed from a string
SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects,
ctx.makeCircle(-80.0, 33.0, DistanceUtils.dist2Degrees(200, DistanceUtils.EARTH_MEAN_RADIUS_KM)));
ctx.getShapeFactory().circle(-80.0, 33.0, DistanceUtils.dist2Degrees(200, DistanceUtils.EARTH_MEAN_RADIUS_KM)));
Query query = strategy.makeQuery(args);
TopDocs docs = indexSearcher.search(query, 10, idSort);
assertDocMatchedIds(indexSearcher, docs, 2);
@ -168,7 +168,7 @@ public class SpatialExample extends LuceneTestCase {
}
//--Match all, order by distance ascending
{
Point pt = ctx.makePoint(60, -50);
Point pt = ctx.getShapeFactory().pointXY(60, -50);
DoubleValuesSource valueSource = strategy.makeDistanceValueSource(pt, DistanceUtils.DEG_TO_KM);//the distance (in km)
Sort distSort = new Sort(valueSource.getSortField(false)).rewrite(indexSearcher);//false=asc dist
TopDocs docs = indexSearcher.search(new MatchAllDocsQuery(), 10, distSort);
@ -183,7 +183,7 @@ public class SpatialExample extends LuceneTestCase {
//demo arg parsing
{
SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects,
ctx.makeCircle(-80.0, 33.0, 1));
ctx.getShapeFactory().circle(-80.0, 33.0, 1));
SpatialArgs args2 = new SpatialArgsParser().parse("Intersects(BUFFER(POINT(-80 33),1))", ctx);
assertEquals(args.toString(),args2.toString());
}

View File

@ -41,7 +41,6 @@ import org.locationtech.spatial4j.shape.Rectangle;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomGaussian;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
/** A base test class for spatial lucene. It's mostly Lucene generic. */
@ -121,7 +120,7 @@ public abstract class SpatialTestCase extends LuceneTestCase {
protected Point randomPoint() {
final Rectangle WB = ctx.getWorldBounds();
return ctx.makePoint(
return ctx.getShapeFactory().pointXY(
randomIntBetween((int) WB.getMinX(), (int) WB.getMaxX()),
randomIntBetween((int) WB.getMinY(), (int) WB.getMaxY()));
}
@ -143,7 +142,7 @@ public abstract class SpatialTestCase extends LuceneTestCase {
double yMin = yNewStartAndHeight[0];
double yMax = yMin + yNewStartAndHeight[1];
return ctx.makeRectangle(xMin, xMax, yMin, yMax);
return ctx.getShapeFactory().rect(xMin, xMax, yMin, yMax);
}
/** Returns new minStart and new length that is inside the range specified by the arguments. */
@ -154,7 +153,7 @@ public abstract class SpatialTestCase extends LuceneTestCase {
int intBoundEnd = (int) (boundStart + boundLen);
int intBoundLen = intBoundEnd - intBoundStart;
int newLen = (int) randomGaussianMeanMax(intBoundLen / 16.0, intBoundLen);
int newStart = intBoundStart + randomInt(intBoundLen - newLen);
int newStart = intBoundStart + randomIntBetween(0, intBoundLen - newLen);
return new double[]{newStart, newLen};
} else { // (no int rounding)
double newLen = randomGaussianMeanMax(boundLen / 16, boundLen);

Some files were not shown because too many files have changed in this diff Show More