mirror of https://github.com/apache/lucene.git
synced 2025-02-07 02:28:49 +00:00

SOLR-13952: reverting Erick's commit (with permission).

This commit is contained in:
parent b5fd6d7b22
commit 063c82ebd6
@@ -37,7 +37,6 @@ public class FixBrokenOffsetsFilterFactory extends TokenFilterFactory {
     super(args);
   }
 
-  @SuppressWarnings("deprecation")
   @Override
   public TokenStream create(TokenStream input) {
     return new FixBrokenOffsetsFilter(input);

@@ -33,6 +33,8 @@ import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
 import org.apache.lucene.search.PhraseQuery;
 
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.*;
+
 /**
  * Factory for {@link WordDelimiterFilter}.
  * <pre class="prettyprint">

@@ -74,31 +76,31 @@ public class WordDelimiterFilterFactory extends TokenFilterFactory implements Re
     super(args);
     int flags = 0;
     if (getInt(args, "generateWordParts", 1) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_WORD_PARTS;
+      flags |= GENERATE_WORD_PARTS;
     }
     if (getInt(args, "generateNumberParts", 1) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_NUMBER_PARTS;
+      flags |= GENERATE_NUMBER_PARTS;
     }
     if (getInt(args, "catenateWords", 0) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_WORDS;
+      flags |= CATENATE_WORDS;
     }
     if (getInt(args, "catenateNumbers", 0) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_NUMBERS;
+      flags |= CATENATE_NUMBERS;
     }
     if (getInt(args, "catenateAll", 0) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_ALL;
+      flags |= CATENATE_ALL;
     }
     if (getInt(args, "splitOnCaseChange", 1) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_CASE_CHANGE;
+      flags |= SPLIT_ON_CASE_CHANGE;
     }
     if (getInt(args, "splitOnNumerics", 1) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_NUMERICS;
+      flags |= SPLIT_ON_NUMERICS;
     }
     if (getInt(args, "preserveOriginal", 0) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.PRESERVE_ORIGINAL;
+      flags |= PRESERVE_ORIGINAL;
     }
     if (getInt(args, "stemEnglishPossessive", 1) != 0) {
-      flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE;
+      flags |= STEM_ENGLISH_POSSESSIVE;
     }
     wordFiles = get(args, PROTECTED_TOKENS);
     types = get(args, TYPES);

@@ -160,17 +162,17 @@ public class WordDelimiterFilterFactory extends TokenFilterFactory implements Re
 
   private Byte parseType(String s) {
     if (s.equals("LOWER"))
-      return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.LOWER;
+      return LOWER;
     else if (s.equals("UPPER"))
-      return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.UPPER;
+      return UPPER;
    else if (s.equals("ALPHA"))
-      return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.ALPHA;
+      return ALPHA;
    else if (s.equals("DIGIT"))
-      return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.DIGIT;
+      return DIGIT;
    else if (s.equals("ALPHANUM"))
-      return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.ALPHANUM;
+      return ALPHANUM;
    else if (s.equals("SUBWORD_DELIM"))
-      return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SUBWORD_DELIM;
+      return SUBWORD_DELIM;
    else
       return null;
   }

@@ -210,7 +210,6 @@ public class SynonymMap {
     /**
      * Builds an {@link SynonymMap} and returns it.
      */
-    @SuppressWarnings("deprecation")
     public SynonymMap build() throws IOException {
       ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton();
       // TODO: are we using the best sharing options?

@@ -30,7 +30,6 @@ import org.apache.lucene.analysis.util.BaseTokenStreamFactoryTestCase;
  */
 public class TestSoraniStemFilterFactory extends BaseTokenStreamFactoryTestCase {
 
-  @SuppressWarnings("resource")
   public void testStemming() throws Exception {
     Reader reader = new StringReader("پیاوەکان");
     TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false);

@@ -1,22 +1,19 @@
 <?xml version="1.0" encoding="utf-8"?>
 <!DOCTYPE hyphenation-info SYSTEM "hyphenation.dtd">
 <!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements. See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership. The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License. You may obtain a copy of the License at
+  Copyright 1999-2004 The Apache Software Foundation
 
-      http://www.apache.org/licenses/LICENSE-2.0
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
 
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied. See the License for the
-  specific language governing permissions and limitations
-  under the License.
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
 -->
 <!--
 This file contains the hyphenation patterns for danish.

@@ -1,22 +1,17 @@
 <?xml version="1.0" encoding="utf-8"?>
 <!DOCTYPE hyphenation-info SYSTEM "hyphenation.dtd">
 <!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements. See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership. The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License. You may obtain a copy of the License at
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
 
-      http://www.apache.org/licenses/LICENSE-2.0
+      http://www.apache.org/licenses/LICENSE-2.0
 
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied. See the License for the
-  specific language governing permissions and limitations
-  under the License.
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
 -->
 <!--
 This file contains the hyphenation patterns for danish.

@@ -1,21 +1,18 @@
 <?xml version="1.0" encoding="utf-8"?>
 <!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements. See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership. The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License. You may obtain a copy of the License at
+  Copyright 1999-2004 The Apache Software Foundation
 
-      http://www.apache.org/licenses/LICENSE-2.0
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
 
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied. See the License for the
-  specific language governing permissions and limitations
-  under the License.
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
 -->
 <!-- $Id: hyphenation.dtd,v 1.3 2004/02/27 18:34:59 jeremias Exp $ -->
 

@@ -257,7 +257,6 @@ public class TestBugInSomething extends BaseTokenStreamTestCase {
       -119, 0, 92, 94, -36, 53, -9, -102, -18, 90, 94, -26, 31, 71, -20
     };
     Analyzer a = new Analyzer() {
-      @SuppressWarnings("deprecation")
       @Override
       protected TokenStreamComponents createComponents(String fieldName) {
         Tokenizer tokenizer = new WikipediaTokenizer();

@@ -82,6 +82,7 @@ import org.apache.lucene.analysis.miscellaneous.LimitTokenOffsetFilter;
 import org.apache.lucene.analysis.miscellaneous.LimitTokenPositionFilter;
 import org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilter;
 import org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilter.StemmerOverrideMap;
+import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
 import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter;
 import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
 import org.apache.lucene.analysis.path.ReversePathHierarchyTokenizer;

@@ -111,7 +112,6 @@ import org.tartarus.snowball.SnowballProgram;
 import org.xml.sax.InputSource;
 
 /** tests random analysis chains */
-@SuppressWarnings("deprecation")
 public class TestRandomChains extends BaseTokenStreamTestCase {
 
   static List<Constructor<? extends Tokenizer>> tokenizers;

@@ -193,7 +193,7 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
       // TODO: it seems to mess up offsets!?
       WikipediaTokenizer.class,
       // TODO: needs to be a tokenizer, doesnt handle graph inputs properly (a shingle or similar following will then cause pain)
-      org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.class,
+      WordDelimiterFilter.class,
       // Cannot correct offsets when a char filter had changed them:
       WordDelimiterGraphFilter.class,
       // requires a special encoded token value, so it may fail with random data:

@@ -321,7 +321,6 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
     }
   }
 
-  @SuppressWarnings("serial")
   private static final Map<Class<?>,Function<Random,Object>> argProducers = new IdentityHashMap<Class<?>,Function<Random,Object>>() {{
     put(int.class, random -> {
       // TODO: could cause huge ram usage to use full int range for some filters

@@ -47,7 +47,6 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.SetOnce.AlreadySetException;
 import org.apache.lucene.util.Version;
 
-@SuppressWarnings("deprecation")
 public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
 
   // Test some examples (TODO: we only check behavior, we may need something like TestRandomChains...)

@@ -25,6 +25,7 @@ import org.apache.lucene.analysis.StopFilter;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.synonym.SynonymFilter;
 import org.apache.lucene.analysis.synonym.SynonymGraphFilter;
 import org.apache.lucene.analysis.synonym.SynonymMap;
 import org.apache.lucene.util.CharsRef;

@@ -68,19 +69,17 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
     assertTokenStreamContents(stream, new String[]{builder.toCharsRef().toString()}, null, null, new int[]{1});
   }
 
-  @SuppressWarnings("deprecation")
   @Test
   public void testWithSynonym() throws Exception {
     SynonymMap.Builder builder = new SynonymMap.Builder(true);
     builder.add(new CharsRef("mykeyword"), new CharsRef("mysynonym"), true);
     Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
     tokenizer.setReader(new StringReader("mykeyword"));
-    org.apache.lucene.analysis.synonym.SynonymFilter filter = new org.apache.lucene.analysis.synonym.SynonymFilter(tokenizer, builder.build(), true);
+    SynonymFilter filter = new SynonymFilter(tokenizer, builder.build(), true);
     ConcatenateGraphFilter stream = new ConcatenateGraphFilter(filter);
     assertTokenStreamContents(stream, new String[] {"mykeyword", "mysynonym"}, null, null, new int[] { 1, 0 });
   }
 
-  @SuppressWarnings("deprecation")
   @Test
   public void testWithSynonyms() throws Exception {
     SynonymMap.Builder builder = new SynonymMap.Builder(true);

@@ -88,7 +87,7 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
     Tokenizer tokenStream = new MockTokenizer(MockTokenizer.WHITESPACE, true);
     String input = "mykeyword another keyword";
     tokenStream.setReader(new StringReader(input));
-    org.apache.lucene.analysis.synonym.SynonymFilter filter = new org.apache.lucene.analysis.synonym.SynonymFilter(tokenStream, builder.build(), true);
+    SynonymFilter filter = new SynonymFilter(tokenStream, builder.build(), true);
     ConcatenateGraphFilter stream = new ConcatenateGraphFilter(filter, SEP_LABEL, false, 100);
     String[] expectedOutputs = new String[2];
     CharsRefBuilder expectedOutput = new CharsRefBuilder();

@@ -133,7 +132,6 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
     }
   }
 
-  @SuppressWarnings("deprecation")
   @Test
   public void testValidNumberOfExpansions() throws IOException {
     SynonymMap.Builder builder = new SynonymMap.Builder(true);

@@ -147,7 +145,7 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
     }
     MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
     tokenizer.setReader(new StringReader(valueBuilder.toString()));
-    org.apache.lucene.analysis.synonym.SynonymFilter filter = new org.apache.lucene.analysis.synonym.SynonymFilter(tokenizer, builder.build(), true);
+    SynonymFilter filter = new SynonymFilter(tokenizer, builder.build(), true);
 
     int count;
     try (ConcatenateGraphFilter stream = new ConcatenateGraphFilter(filter)) {

@@ -31,7 +31,6 @@ import org.apache.lucene.store.Directory;
 
 public class TestFixBrokenOffsetsFilter extends BaseTokenStreamTestCase {
 
-  @SuppressWarnings("deprecation")
   public void testBogusTermVectors() throws IOException {
     Directory dir = newDirectory();
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));

@@ -20,6 +20,7 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.synonym.SynonymFilter;
 import org.apache.lucene.analysis.synonym.SynonymMap;
 import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.CharsRefBuilder;

@@ -59,7 +60,6 @@ public class TestLimitTokenPositionFilter extends BaseTokenStreamTestCase {
     }
   }
 
-  @SuppressWarnings("deprecation")
   public void testMaxPosition3WithSynomyms() throws IOException {
     for (final boolean consumeAll : new boolean[]{true, false}) {
       MockTokenizer tokenizer = whitespaceMockTokenizer("one two three four five");

@@ -76,7 +76,7 @@ public class TestLimitTokenPositionFilter extends BaseTokenStreamTestCase {
       SynonymMap.Builder.join(new String[]{"dopple", "ganger"}, multiWordCharsRef);
       builder.add(new CharsRef("two"), multiWordCharsRef.get(), true);
       SynonymMap synonymMap = builder.build();
-      TokenStream stream = new org.apache.lucene.analysis.synonym.SynonymFilter(tokenizer, synonymMap, true);
+      TokenStream stream = new SynonymFilter(tokenizer, synonymMap, true);
       stream = new LimitTokenPositionFilter(stream, 3, consumeAll);
 
       // "only", the 4th word of multi-word synonym "and indubitably single only" is not emitted, since its position is greater than 3.

@@ -156,7 +156,6 @@ public class TestRemoveDuplicatesTokenFilter extends BaseTokenStreamTestCase {
       @Override
       protected TokenStreamComponents createComponents(String fieldName) {
         Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
-        @SuppressWarnings("deprecation")
         TokenStream stream = new SynonymFilter(tokenizer, map, ignoreCase);
         return new TokenStreamComponents(tokenizer, new RemoveDuplicatesTokenFilter(stream));
       }

@@ -54,7 +54,6 @@ import static org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator.DEF
  * TODO: should explicitly test things like protWords and not rely on
  * the factory tests in Solr.
  */
-@SuppressWarnings("deprecation")
 public class TestWordDelimiterFilter extends BaseTokenStreamTestCase {
 
   /*

@@ -119,7 +119,6 @@ public class EdgeNGramTokenizerTest extends BaseTokenStreamTestCase {
         false);
   }
 
-  @SuppressWarnings("deprecation")
   private static void testNGrams(int minGram, int maxGram, int length, final String nonTokenChars) throws IOException {
     final String s = RandomStrings.randomAsciiOfLength(random(), length);
     testNGrams(minGram, maxGram, s, nonTokenChars);

@@ -119,7 +119,6 @@ public class NGramTokenizerTest extends BaseTokenStreamTestCase {
     }
   }
 
-  @SuppressWarnings("deprecation")
   private static void testNGrams(int minGram, int maxGram, int length, final String nonTokenChars) throws IOException {
     final String s = RandomStrings.randomAsciiOfLength(random(), length);
     testNGrams(minGram, maxGram, s, nonTokenChars);

@@ -47,7 +47,6 @@ public class TestSolrSynonymParser extends BaseSynonymParserTestCase {
     analyzer.close();
 
     analyzer = new Analyzer() {
-      @SuppressWarnings("deprecation")
       @Override
       protected TokenStreamComponents createComponents(String fieldName) {
         Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);

@@ -139,7 +138,6 @@ public class TestSolrSynonymParser extends BaseSynonymParserTestCase {
     final SynonymMap map = parser.build();
     analyzer.close();
     analyzer = new Analyzer() {
-      @SuppressWarnings("deprecation")
       @Override
       protected TokenStreamComponents createComponents(String fieldName) {
         Tokenizer tokenizer = new MockTokenizer(MockTokenizer.KEYWORD, false);

@@ -173,7 +171,6 @@ public class TestSolrSynonymParser extends BaseSynonymParserTestCase {
     analyzer.close();
 
     analyzer = new Analyzer() {
-      @SuppressWarnings("deprecation")
       @Override
       protected TokenStreamComponents createComponents(String fieldName) {
         Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);

@@ -30,7 +30,6 @@ import org.apache.lucene.util.Version;
 public class TestSynonymFilterFactory extends BaseTokenStreamFactoryTestCase {
 
   /** checks for synonyms of "GB" in synonyms.txt */
-  @SuppressWarnings("deprecation")
   private void checkSolrSynonyms(TokenFilterFactory factory) throws Exception {
     Reader reader = new StringReader("GB");
     TokenStream stream = whitespaceMockTokenizer(reader);

@@ -42,7 +41,6 @@ public class TestSynonymFilterFactory extends BaseTokenStreamFactoryTestCase {
   }
 
   /** checks for synonyms of "second" in synonyms-wordnet.txt */
-  @SuppressWarnings("deprecation")
   private void checkWordnetSynonyms(TokenFilterFactory factory) throws Exception {
     Reader reader = new StringReader("second");
     TokenStream stream = whitespaceMockTokenizer(reader);

@@ -39,7 +39,6 @@ import org.apache.lucene.analysis.tokenattributes.*;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.TestUtil;
 
-@SuppressWarnings("deprecation")
 public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
 
   private SynonymMap.Builder b;

@@ -45,7 +45,6 @@ public class TestWordnetSynonymParser extends BaseTokenStreamTestCase {
     analyzer.close();
 
     analyzer = new Analyzer() {
-      @SuppressWarnings("deprecation")
       @Override
       protected TokenStreamComponents createComponents(String fieldName) {
         Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);

@@ -29,7 +29,6 @@ import org.apache.lucene.util.Version;
 
 public class TestAnalysisSPILoader extends LuceneTestCase {
 
-  @SuppressWarnings("serial")
   private Map<String,String> versionArgOnly() {
     return new HashMap<String, String>() {{
       put("luceneMatchVersion", Version.LATEST.toString());

@@ -54,7 +54,6 @@ public class TestFilesystemResourceLoader extends LuceneTestCase {
       rl.newInstance("org.apache.lucene.analysis.util.RollingCharBuffer", Object.class).getClass().getName());
   }
 
-  @SuppressWarnings("deprecation")
   public void testBaseDir() throws Exception {
     final Path base = createTempDir("fsResourceLoaderBase");
     Writer os = Files.newBufferedWriter(base.resolve("template.txt"), StandardCharsets.UTF_8);

@@ -73,7 +73,6 @@ public final class ICUTransformFilter extends TokenFilter {
    * @param input {@link TokenStream} to filter.
    * @param transform Transliterator to transform the text.
    */
-  @SuppressWarnings("deprecation")
   public ICUTransformFilter(TokenStream input, Transliterator transform) {
     super(input);
     this.transform = transform;

@@ -169,7 +169,6 @@ public class SpatialDocMaker extends DocMaker {
     final boolean bbox = config.get(configKeyPrefix + "bbox", false);
 
     return new ShapeConverter() {
-      @SuppressWarnings("deprecation")
       @Override
       public Shape convert(Shape shape) {
         if (shape instanceof Point && (radiusDegrees != 0.0 || plusMinus != 0.0)) {

@@ -218,7 +217,6 @@ public class SpatialDocMaker extends DocMaker {
     return doc;
   }
 
-  @SuppressWarnings("deprecation")
   public static Shape makeShapeFromString(SpatialStrategy strategy, String name, String shapeStr) {
     if (shapeStr != null && shapeStr.length() > 0) {
       try {

@@ -288,7 +288,7 @@ public class AnalyzerFactoryTask extends PerfTask {
    * @param stok stream tokenizer from which to draw analysis factory params
    * @param clazz analysis factory class to instantiate
    */
-  @SuppressWarnings({"fallthrough", "deprecation"})
+  @SuppressWarnings("fallthrough")
   private void createAnalysisPipelineComponent
       (StreamTokenizer stok, Class<? extends AbstractAnalysisFactory> clazz) {
     Map<String,String> argMap = new HashMap<>();

@@ -33,7 +33,6 @@ import org.apache.lucene.util.SuppressForbidden;
 @SuppressForbidden(reason = "Uses a Long instance as a marker")
 public final class PositiveIntOutputs extends Outputs<Long> {
 
-  @SuppressWarnings("deprecation")
   private final static Long NO_OUTPUT = new Long(0);
 
   private final static PositiveIntOutputs singleton = new PositiveIntOutputs();

@@ -57,7 +57,6 @@ public class TestCharArraySet extends LuceneTestCase {
     assertTrue(set.contains(new String(findme,1,4)));
   }
 
-  @SuppressWarnings("deprecation")
   @SuppressForbidden(reason = "Explicitly checking new Integers")
   public void testObjectContains() {
     CharArraySet set = new CharArraySet(10, true);

@@ -212,6 +211,7 @@ public class TestCharArraySet extends LuceneTestCase {
     }
   }
 
+  @SuppressWarnings("deprecated")
   public void testCopyCharArraySetBWCompat() {
     CharArraySet setIngoreCase = new CharArraySet(10, true);
     CharArraySet setCaseSensitive = new CharArraySet(10, false);

@@ -212,7 +212,6 @@ public class TestDocument extends LuceneTestCase {
     dir.close();
   }
 
-  @SuppressWarnings("deprecation")
   public void testGetValues() {
     Document doc = makeDocumentWithFields();
     assertEquals(new String[] {"test1", "test2"},

@@ -149,7 +149,6 @@ public class TestFieldUpdatesBuffer extends LuceneTestCase {
     assertFalse(buffer.isNumeric());
   }
 
-  @SuppressWarnings("unchecked")
   public <T extends DocValuesUpdate> T getRandomUpdate(boolean binary) {
     String termField = RandomPicks.randomFrom(random(), Arrays.asList("id", "_id", "some_other_field"));
     String docId = "" + random().nextInt(10);

@@ -33,6 +33,8 @@ import org.junit.Test;
 
 import java.io.IOException;
 
+import junit.framework.Assert;
+
 public class TestMultiTermConstantScore extends BaseTestRangeFilter {
 
   /** threshold for comparing floats */

@@ -41,9 +43,8 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
   static Directory small;
   static IndexReader reader;
 
-  @SuppressWarnings("deprecation")
   static public void assertEquals(String m, int e, int a) {
-    junit.framework.Assert.assertEquals(m, e, a);
+    Assert.assertEquals(m, e, a);
   }
 
   @BeforeClass

@@ -281,7 +281,6 @@ public class TestSearchAfter extends LuceneTestCase {
     assertEquals(all.scoreDocs.length, pageStart);
   }
 
-  @SuppressWarnings("deprecation")
   void assertPage(int pageStart, TopDocs all, TopDocs paged) throws IOException {
     assertEquals(all.totalHits.value, paged.totalHits.value);
     for (int i = 0; i < paged.scoreDocs.length; i++) {

@@ -20,7 +20,6 @@ import java.util.Arrays;
 
 
 public class TestCharsRef extends LuceneTestCase {
-  @SuppressWarnings("deprecation")
   public void testUTF16InUTF8Order() {
     final int numStrings = atLeast(1000);
     BytesRef utf8[] = new BytesRef[numStrings];

@@ -23,7 +23,6 @@ import java.text.ParseException;
 import java.util.Locale;
 import java.util.Random;
 
-@SuppressWarnings("deprecation")
 public class TestVersion extends LuceneTestCase {
 
   public void testOnOrAfter() throws Exception {

@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.util;
+
+public class TestVirtualMethod extends LuceneTestCase {
+
+  private static final VirtualMethod<TestVirtualMethod> publicTestMethod =
+    new VirtualMethod<>(TestVirtualMethod.class, "publicTest", String.class);
+  private static final VirtualMethod<TestVirtualMethod> protectedTestMethod =
+    new VirtualMethod<>(TestVirtualMethod.class, "protectedTest", int.class);
+
+  public void publicTest(String test) {}
+  protected void protectedTest(int test) {}
+
+  static class TestClass1 extends TestVirtualMethod {
+    @Override
+    public void publicTest(String test) {}
+    @Override
+    protected void protectedTest(int test) {}
+  }
+
+  static class TestClass2 extends TestClass1 {
+    @Override // make it public here
+    public void protectedTest(int test) {}
+  }
+
+  static class TestClass3 extends TestClass2 {
+    @Override
+    public void publicTest(String test) {}
+  }
+
+  static class TestClass4 extends TestVirtualMethod {
+  }
+
+  static class TestClass5 extends TestClass4 {
+  }
+
+  public void testGeneral() {
+    assertEquals(0, publicTestMethod.getImplementationDistance(this.getClass()));
+    assertEquals(1, publicTestMethod.getImplementationDistance(TestClass1.class));
+    assertEquals(1, publicTestMethod.getImplementationDistance(TestClass2.class));
+    assertEquals(3, publicTestMethod.getImplementationDistance(TestClass3.class));
+    assertFalse(publicTestMethod.isOverriddenAsOf(TestClass4.class));
+    assertFalse(publicTestMethod.isOverriddenAsOf(TestClass5.class));
+
+    assertEquals(0, protectedTestMethod.getImplementationDistance(this.getClass()));
+    assertEquals(1, protectedTestMethod.getImplementationDistance(TestClass1.class));
+    assertEquals(2, protectedTestMethod.getImplementationDistance(TestClass2.class));
+    assertEquals(2, protectedTestMethod.getImplementationDistance(TestClass3.class));
+    assertFalse(protectedTestMethod.isOverriddenAsOf(TestClass4.class));
+    assertFalse(protectedTestMethod.isOverriddenAsOf(TestClass5.class));
+
+    assertTrue(VirtualMethod.compareImplementationDistance(TestClass3.class, publicTestMethod, protectedTestMethod) > 0);
+    assertEquals(0, VirtualMethod.compareImplementationDistance(TestClass5.class, publicTestMethod, protectedTestMethod));
+  }
+
+  @SuppressWarnings({"rawtypes","unchecked"})
+  public void testExceptions() {
+    // LuceneTestCase is not a subclass and can never override publicTest(String)
+    expectThrows(IllegalArgumentException.class, () -> {
+      // cast to Class to remove generics:
+      publicTestMethod.getImplementationDistance((Class) LuceneTestCase.class);
+    });
+
+    // Method bogus() does not exist, so IAE should be thrown
+    expectThrows(IllegalArgumentException.class, () -> {
+      new VirtualMethod<>(TestVirtualMethod.class, "bogus");
+    });
+
+    // Method publicTest(String) is not declared in TestClass2, so IAE should be thrown
+    expectThrows(IllegalArgumentException.class, () -> {
+      new VirtualMethod<>(TestClass2.class, "publicTest", String.class);
+    });
+
+    // try to create a second instance of the same baseClass / method combination
+    expectThrows(UnsupportedOperationException.class, () -> {
+      new VirtualMethod<>(TestVirtualMethod.class, "publicTest", String.class);
+    });
+  }
+
+}

@@ -39,7 +39,6 @@ public class Test2BFST extends LuceneTestCase {
 
   private static long LIMIT = 3L*1024*1024*1024;
 
-  @SuppressWarnings("deprecation")
   public void test() throws Exception {
     assumeWorkingMMapOnWindows();
 

@@ -567,7 +567,7 @@ public class TestFSTs extends LuceneTestCase {
         } else {
           // Get by output
           final Long output = (Long) getOutput(intsRef.get(), ord);
-          @SuppressWarnings({"unchecked", "deprecation"}) final IntsRef actual = Util.getByOutput((FST<Long>) fst, output.longValue());
+          @SuppressWarnings("unchecked") final IntsRef actual = Util.getByOutput((FST<Long>) fst, output.longValue());
           if (actual == null) {
             throw new RuntimeException("unexpected null input from output=" + output);
           }

@@ -787,7 +787,6 @@ public class TestFSTs extends LuceneTestCase {
   }
   */
 
-  @SuppressWarnings("deprecation")
   public void testSimple() throws Exception {
 
     // Get outputs -- passing true means FST will share

@@ -404,7 +404,6 @@ public class DrillSideways {
   }
 
   /** Runs a search, using a {@link CollectorManager} to gather and merge search results */
-  @SuppressWarnings("unchecked")
   public <R> ConcurrentDrillSidewaysResult<R> search(final DrillDownQuery query,
                                                      final CollectorManager<?, R> hitCollectorManager) throws IOException {
 

@@ -287,11 +287,9 @@ public class TestGrouping extends LuceneTestCase {
 
   private Collection<SearchGroup<BytesRef>> getSearchGroups(FirstPassGroupingCollector<?> c, int groupOffset) throws IOException {
     if (TermGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
-      @SuppressWarnings("unchecked")
       FirstPassGroupingCollector<BytesRef> collector = (FirstPassGroupingCollector<BytesRef>) c;
       return collector.getTopGroups(groupOffset);
     } else if (ValueSourceGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
-      @SuppressWarnings("unchecked")
       FirstPassGroupingCollector<MutableValue> collector = (FirstPassGroupingCollector<MutableValue>) c;
       Collection<SearchGroup<MutableValue>> mutableValueGroups = collector.getTopGroups(groupOffset);
       if (mutableValueGroups == null) {

@@ -406,6 +404,7 @@ public class TestGrouping extends LuceneTestCase {
     };
   }
 
+  @SuppressWarnings({"unchecked","rawtypes"})
   private Comparable<?>[] fillFields(GroupDoc d, Sort sort) {
     final SortField[] sortFields = sort.getSort();
     final Comparable<?>[] fields = new Comparable[sortFields.length];

@@ -491,7 +490,7 @@ public class TestGrouping extends LuceneTestCase {
     final int limit = Math.min(groupOffset + topNGroups, groups.size());
 
     final Comparator<GroupDoc> docSortComp = getComparator(docSort);
-    @SuppressWarnings({"unchecked"})
+    @SuppressWarnings({"unchecked","rawtypes"})
     final GroupDocs<BytesRef>[] result = new GroupDocs[limit-groupOffset];
     int totalGroupedHitCount = 0;
     for(int idx=groupOffset;idx < limit;idx++) {

@@ -1043,7 +1042,7 @@ public class TestGrouping extends LuceneTestCase {
       }
       // Get block grouping result:
       sBlocks.search(query, c4);
-      @SuppressWarnings({"unchecked"})
+      @SuppressWarnings({"unchecked","rawtypes"})
       final TopGroups<BytesRef> tempTopGroupsBlocks = (TopGroups<BytesRef>) c3.getTopGroups(docSort, groupOffset, docOffset, docOffset+docsPerGroup);
       final TopGroups<BytesRef> groupsResultBlocks;
       if (doAllGroups && tempTopGroupsBlocks != null) {

@@ -1199,7 +1198,7 @@ public class TestGrouping extends LuceneTestCase {
 
       if (mergedTopGroups != null) {
         // Now 2nd pass:
-        @SuppressWarnings({"unchecked"})
+        @SuppressWarnings({"unchecked","rawtypes"})
         final TopGroups<BytesRef>[] shardTopGroups = new TopGroups[subSearchers.length];
         for(int shardIDX=0;shardIDX<subSearchers.length;shardIDX++) {
           final TopGroupsCollector<?> secondPassCollector = createSecondPassCollector(firstPassGroupingCollectors.get(shardIDX),

@@ -2005,7 +2005,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
 
     TopDocs hits = searcher.search(query, 10);
     assertEquals(1, hits.scoreDocs.length);
-    @SuppressWarnings("deprecation")
     TokenStream stream = TokenSources.getAnyTokenStream(searcher.getIndexReader(), 0, FIELD_NAME, analyzer);
     if (random().nextBoolean()) {
       stream = new CachingTokenFilter(stream);//conceals detection of TokenStreamFromTermVector

@@ -107,7 +107,6 @@ public class ToParentBlockJoinSortField extends SortField {
   private FieldComparator<?> getStringComparator(int numHits) {
     return new FieldComparator.TermOrdValComparator(numHits, getField(), missingValue == STRING_LAST) {
 
-      @SuppressWarnings("deprecation")
       @Override
       protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException {
         SortedSetDocValues sortedSet = DocValues.getSortedSet(context.reader(), field);

@@ -127,7 +126,6 @@ public class ToParentBlockJoinSortField extends SortField {
 
   private FieldComparator<?> getIntComparator(int numHits) {
     return new FieldComparator.IntComparator(numHits, getField(), (Integer) missingValue) {
-      @SuppressWarnings("deprecation")
       @Override
       protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
         SortedNumericDocValues sortedNumeric = DocValues.getSortedNumeric(context.reader(), field);

@@ -146,7 +144,6 @@ public class ToParentBlockJoinSortField extends SortField {
 
   private FieldComparator<?> getLongComparator(int numHits) {
     return new FieldComparator.LongComparator(numHits, getField(), (Long) missingValue) {
-      @SuppressWarnings("deprecation")
       @Override
       protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
         SortedNumericDocValues sortedNumeric = DocValues.getSortedNumeric(context.reader(), field);

@@ -165,7 +162,6 @@ public class ToParentBlockJoinSortField extends SortField {
 
   private FieldComparator<?> getFloatComparator(int numHits) {
     return new FieldComparator.FloatComparator(numHits, getField(), (Float) missingValue) {
-      @SuppressWarnings("deprecation")
       @Override
       protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
         SortedNumericDocValues sortedNumeric = DocValues.getSortedNumeric(context.reader(), field);

@@ -190,7 +186,6 @@ public class ToParentBlockJoinSortField extends SortField {
 
   private FieldComparator<?> getDoubleComparator(int numHits) {
     return new FieldComparator.DoubleComparator(numHits, getField(), (Double) missingValue) {
-      @SuppressWarnings("deprecation")
       @Override
       protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
         SortedNumericDocValues sortedNumeric = DocValues.getSortedNumeric(context.reader(), field);

@@ -108,7 +108,6 @@ public class TestBlockJoinSelector extends LuceneTestCase {
         : ((NumericDocValues) sdv).advanceExact(target);
   }
 
-  @SuppressWarnings("deprecation")
   public void testSortedSelector() throws IOException {
     final BitSet parents = new FixedBitSet(20);
     parents.set(0);

@@ -218,7 +217,6 @@ public class TestBlockJoinSelector extends LuceneTestCase {
     }
   }
 
-  @SuppressWarnings("deprecation")
   public void testNumericSelector() throws Exception {
     final BitSet parents = new FixedBitSet(20);
     parents.set(0);

@@ -31,18 +31,18 @@ repository ("Repository") in CVS modules ("Modules") and made available
 as downloadable archives ("Downloads").</p>
 
 <ul>
-<li>Content may be structured and packaged into modules to
+<li>Content may be structured and packaged into modules to
 facilitate delivering, extending, and upgrading the Content. Typical
 modules may include plug-ins ("Plug-ins"), plug-in fragments
 ("Fragments"), and features ("Features").</li>
-<li>Each Plug-in or Fragment may be packaged as a sub-directory or JAR (Java™ ARchive) in a directory named "plugins".</li>
-<li>A
+<li>Each Plug-in or Fragment may be packaged as a sub-directory or JAR (Java™ ARchive) in a directory named "plugins".</li>
+<li>A
 Feature is a bundle of one or more Plug-ins and/or Fragments and
 associated material. Each Feature may be packaged as a sub-directory in
 a directory named "features". Within a Feature, files named
 "feature.xml" may contain a list of the names and version numbers of
 the Plug-ins and/or Fragments associated with that Feature.</li>
-<li>Features
+<li>Features
 may also include other Features ("Included Features"). Within a
 Feature, files named "feature.xml" may contain a list of the names and
 version numbers of Included Features.</li>

@@ -57,13 +57,13 @@ directory of a Download or Module
 including, but not limited to the following locations:</p>
 
 <ul>
-<li>The top-level (root) directory</li>
-<li>Plug-in and Fragment directories</li>
-<li>Inside Plug-ins and Fragments packaged as JARs</li>
-<li>Sub-directories of the directory named "src" of certain Plug-ins</li>
-<li>Feature directories</li>
+<li>The top-level (root) directory</li>
+<li>Plug-in and Fragment directories</li>
+<li>Inside Plug-ins and Fragments packaged as JARs</li>
+<li>Sub-directories of the directory named "src" of certain Plug-ins</li>
+<li>Feature directories</li>
 </ul>
 
 <p>Note: if a Feature made available by the Eclipse Foundation is
 installed using the Eclipse Update Manager, you must agree to a license
 ("Feature Update License") during the

@@ -84,12 +84,12 @@ CONDITIONS. SOME OF THESE
 OTHER LICENSE AGREEMENTS MAY INCLUDE (BUT ARE NOT LIMITED TO):</p>
 
 <ul>
-<li>Common Public License Version 1.0 (available at <a href="http://www.eclipse.org/legal/cpl-v10.html">http://www.eclipse.org/legal/cpl-v10.html</a>)</li>
-<li>Apache Software License 1.1 (available at <a href="http://www.apache.org/licenses/LICENSE">http://www.apache.org/licenses/LICENSE</a>)</li>
-<li>Apache Software License 2.0 (available at <a href="http://www.apache.org/licenses/LICENSE-2.0">http://www.apache.org/licenses/LICENSE-2.0</a>)</li>
-<li>IBM Public License 1.0 (available at <a href="http://oss.software.ibm.com/developerworks/opensource/license10.html">http://oss.software.ibm.com/developerworks/opensource/license10.html</a>)</li>
-<li>Metro Link Public License 1.00 (available at <a href="http://www.opengroup.org/openmotif/supporters/metrolink/license.html">http://www.opengroup.org/openmotif/supporters/metrolink/license.html</a>)</li>
-<li>Mozilla Public License Version 1.1 (available at <a href="http://www.mozilla.org/MPL/MPL-1.1.html">http://www.mozilla.org/MPL/MPL-1.1.html</a>)</li>
+<li>Common Public License Version 1.0 (available at <a href="http://www.eclipse.org/legal/cpl-v10.html">http://www.eclipse.org/legal/cpl-v10.html</a>)</li>
+<li>Apache Software License 1.1 (available at <a href="http://www.apache.org/licenses/LICENSE">http://www.apache.org/licenses/LICENSE</a>)</li>
+<li>Apache Software License 2.0 (available at <a href="http://www.apache.org/licenses/LICENSE-2.0">http://www.apache.org/licenses/LICENSE-2.0</a>)</li>
+<li>IBM Public License 1.0 (available at <a href="http://oss.software.ibm.com/developerworks/opensource/license10.html">http://oss.software.ibm.com/developerworks/opensource/license10.html</a>)</li>
+<li>Metro Link Public License 1.00 (available at <a href="http://www.opengroup.org/openmotif/supporters/metrolink/license.html">http://www.opengroup.org/openmotif/supporters/metrolink/license.html</a>)</li>
+<li>Mozilla Public License Version 1.1 (available at <a href="http://www.mozilla.org/MPL/MPL-1.1.html">http://www.mozilla.org/MPL/MPL-1.1.html</a>)</li>
 </ul>
 
 <p>IT IS YOUR OBLIGATION TO READ AND ACCEPT ALL SUCH TERMS AND

@@ -10,8 +10,8 @@ THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS COMMON PUBLIC LICEN
 
 a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
 b) in the case of each subsequent Contributor:
-i) changes to the Program, and
-ii) additions to the Program;
+i) changes to the Program, and
+ii) additions to the Program;
 where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
 
 "Contributor" means any person or entity that distributes the Program.

@@ -28,24 +28,24 @@ where such changes and/or additions to the Program originate from and are distri
 
 2. GRANT OF RIGHTS
 
-a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
+a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
 b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
-c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
-d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
+c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
+d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
 3. REQUIREMENTS
 
 A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
 
-a) it complies with the terms and conditions of this Agreement; and
-b) its license agreement:
-i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
+a) it complies with the terms and conditions of this Agreement; and
+b) its license agreement:
+i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
 ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
-iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
-iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
+iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
+iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
 When the Program is made available in source code form:
 
-a) it must be made available under this Agreement; and
-b) a copy of this Agreement must be included with each copy of the Program.
+a) it must be made available under this Agreement; and
+b) a copy of this Agreement must be included with each copy of the Program.
 
 Contributors may not remove or alter any copyright notices contained within the Program.
 

@@ -335,7 +335,6 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
     reader.close();
   }
 
-  @SuppressWarnings("unused")
   private Allocator randomByteBlockAllocator() {
     if (random().nextBoolean()) {
       return new RecyclingByteBlockAllocator();

@@ -378,7 +377,6 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
   }
 
   // LUCENE-3831
-  @SuppressWarnings("resource")
   public void testNullPointerException() throws IOException {
     RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
     SpanQuery wrappedquery = new SpanMultiTermQueryWrapper<>(regex);

@@ -392,7 +390,6 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
   }
 
   // LUCENE-3831
-  @SuppressWarnings("resource")
   public void testPassesIfWrapped() throws IOException {
     RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
     SpanQuery wrappedquery = new SpanOrQuery(new SpanMultiTermQueryWrapper<>(regex));

@@ -83,7 +83,6 @@ public final class UpToTwoPositiveIntOutputs extends Outputs<Object> {
     }
   }
 
-  @SuppressWarnings("deprecation")
   private final static Long NO_OUTPUT = new Long(0);
 
   private final boolean doShare;

@@ -23,7 +23,6 @@ import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.BaseSimilarityTestCase;
 import org.apache.lucene.search.similarities.Similarity;
 
-@SuppressWarnings("deprecation")
 public class TestLegacyBM25Similarity extends BaseSimilarityTestCase {
 
   public void testIllegalK1() {

@@ -42,7 +42,6 @@ import org.apache.lucene.search.SortField;
  *
  *
  */
-@SuppressWarnings({"unchecked", "rawtypes", "deprecation"})
 public abstract class ValueSource {
 
   /**

@@ -144,7 +144,6 @@ public class DocFreqValueSource extends ValueSource {
     return name() + '(' + field + ',' + val + ')';
   }
 
-  @SuppressWarnings("rawtypes")
   @Override
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");

@@ -152,7 +151,6 @@ public class DocFreqValueSource extends ValueSource {
     return new ConstIntDocValues(docfreq, this);
   }
 
-  @SuppressWarnings({"unchecked", "rawtypes"})
   @Override
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     context.put("searcher",searcher);

@@ -40,13 +40,11 @@ public class MaxDocValueSource extends ValueSource {
     return name() + "()";
   }
 
-  @SuppressWarnings({"unchecked", "rawtypes"})
   @Override
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     context.put("searcher",searcher);
   }
 
-  @SuppressWarnings("rawtypes")
   @Override
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");

@@ -54,13 +54,11 @@ public class NormValueSource extends ValueSource {
     return name() + '(' + field + ')';
   }
 
-  @SuppressWarnings({"unchecked", "rawtypes"})
   @Override
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     context.put("searcher",searcher);
   }
 
-  @SuppressWarnings("rawtypes")
   @Override
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");

@@ -36,7 +36,6 @@ import org.apache.lucene.util.mutable.MutableValueFloat;
 /**
  * <code>QueryValueSource</code> returns the relevance score of the query
  */
-@SuppressWarnings({"unchecked", "rawtypes"})
 public class QueryValueSource extends ValueSource {
   final Query q;
   final float defVal;

@@ -59,7 +59,6 @@ public class ScaleFloatFunction extends ValueSource {
     float maxVal;
   }
 
-  @SuppressWarnings({"unchecked", "rawtypes"})
   private ScaleInfo createScaleInfo(Map context, LeafReaderContext readerContext) throws IOException {
     final List<LeafReaderContext> leaves = ReaderUtil.getTopLevelContext(readerContext).leaves();
 

@@ -100,7 +99,6 @@ public class ScaleFloatFunction extends ValueSource {
     return scaleInfo;
   }
 
-  @SuppressWarnings("rawtypes")
   @Override
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
 

@@ -134,7 +132,6 @@ public class ScaleFloatFunction extends ValueSource {
     };
   }
 
-  @SuppressWarnings("rawtypes")
   @Override
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     source.createWeight(context, searcher);

@ -47,13 +47,11 @@ public class SumTotalTermFreqValueSource extends ValueSource {
|
||||
return name() + '(' + indexedField + ')';
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
@Override
|
||||
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
|
||||
return (FunctionValues)context.get(this);
|
||||
}
|
||||
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
@Override
|
||||
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
|
||||
long sumTotalTermFreq = 0;
|
||||
|
@ -54,13 +54,11 @@ public class TotalTermFreqValueSource extends ValueSource {
|
||||
return name() + '(' + field + ',' + val + ')';
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
@Override
|
||||
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
|
||||
return (FunctionValues)context.get(this);
|
||||
}
|
||||
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
@Override
|
||||
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
|
||||
long totalTermFreq = 0;
|
||||
|
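(Illustrative aside, not part of this commit.) The ValueSource hunks above share one pattern: createWeight stores the searcher into a raw java.util.Map, so those overrides need "unchecked" (a put into a raw Map) on top of "rawtypes", while getValues only reads back and casts. A self-contained sketch of why the two warning sets differ; the class and method names here are hypothetical:

    import java.util.IdentityHashMap;
    import java.util.Map;

    class RawContextSketch {
      @SuppressWarnings({"unchecked", "rawtypes"})
      static Object roundTrip(Object searcher) {
        Map context = new IdentityHashMap(); // rawtypes: the context Map is declared raw
        context.put("searcher", searcher);   // unchecked: putting into a raw Map
        return context.get("searcher");      // reading back triggers no warning; callers just cast
      }
    }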
@ -210,7 +210,6 @@ public class TestPayloadCheckQuery extends LuceneTestCase {
assertFalse(query3.equals(query4));
}

@SuppressWarnings({"unchecked", "rawtypes"})
public void testRewrite() throws IOException {
SpanMultiTermQueryWrapper fiv = new SpanMultiTermQueryWrapper(new WildcardQuery(new Term("field", "fiv*")));
SpanMultiTermQueryWrapper hund = new SpanMultiTermQueryWrapper(new WildcardQuery(new Term("field", "hund*")));

@ -109,12 +109,7 @@ public final class FastCharStream implements CharStream {
public final int getColumn() {
return bufferStart + bufferPosition;
}
/**
* @deprecated use getEndLine
* @see #getEndLine
*/
@Override
@Deprecated
public final int getLine() {
return 1;
}

@ -65,7 +65,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer

protected String field;
int phraseSlop = 0;
@SuppressWarnings("deprecation")
float fuzzyMinSim = FuzzyQuery.defaultMinSimilarity;
int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;
Locale locale = Locale.getDefault();
@ -589,7 +588,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
* @param prefixLength prefix length
* @return new FuzzyQuery Instance
*/
@SuppressWarnings("deprecation")
protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
// FuzzyQuery doesn't yet allow constant score rewrite
String text = term.text();

@ -36,7 +36,6 @@ public class FuzzyQueryNodeBuilder implements StandardQueryBuilder {
FuzzyQueryNode fuzzyNode = (FuzzyQueryNode) queryNode;
String text = fuzzyNode.getTextAsString();

@SuppressWarnings("deprecation")
int numEdits = FuzzyQuery.floatToEdits(fuzzyNode.getSimilarity(),
text.codePointCount(0, text.length()));


@ -25,7 +25,6 @@ public class FuzzyConfig {

private int prefixLength = FuzzyQuery.defaultPrefixLength;

@SuppressWarnings("deprecation")
private float minSimilarity = FuzzyQuery.defaultMinSimilarity;

public FuzzyConfig() {}
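(Illustrative aside, not part of this commit.) The hunks above all suppress deprecation around FuzzyQuery.defaultMinSimilarity and FuzzyQuery.floatToEdits, the bridge from the old float-similarity API to integer edit distances. A sketch of that conversion; the wrapper class and method are mine, only floatToEdits comes from Lucene:

    import org.apache.lucene.search.FuzzyQuery;

    class FuzzyEditsSketch {
      // Values >= 1 are treated as literal edit counts; fractional similarities
      // are scaled by the term's code point length.
      static int editsFor(String text, float minSim) {
        return FuzzyQuery.floatToEdits(minSim, text.codePointCount(0, text.length()));
      }
    }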
@ -106,21 +106,10 @@ public final class FastCharStream implements CharStream {
}

@Override
@Deprecated
/**
* Returns the column position of the character last read.
* @deprecated
* @see #getEndColumn
*/
public final int getColumn() {
return bufferStart + bufferPosition;
}
@Override
@Deprecated
/**
* @deprecated
* @see #getEndLine
*/
public final int getLine() {
return 1;
}

@ -105,21 +105,10 @@ public final class FastCharStream implements CharStream {
}

@Override
@Deprecated
/**
* @deprecated
* @see #getEndColumn
*/
public final int getColumn() {
return bufferStart + bufferPosition;
}
@Override
@Deprecated
/**
* Returns the line number of the character last read.
* @deprecated
* @see #getEndLine
*/
public final int getLine() {
return 1;
}
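(Illustrative aside, not part of this commit.) The two FastCharStream hunks above place @Override/@Deprecated ahead of the doc comments. Conventionally the javadoc block comes first, since the javadoc tool only attaches a /** ... */ comment that immediately precedes the declaration it documents; the same lines in the conventional order would read:

    /**
     * Returns the line number of the character last read.
     * @deprecated
     * @see #getEndLine
     */
    @Override
    @Deprecated
    public final int getLine() {
      return 1;
    }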
@ -33,7 +33,6 @@ import org.w3c.dom.NodeList;
public class FuzzyLikeThisQueryBuilder implements QueryBuilder {

private static final int DEFAULT_MAX_NUM_TERMS = 50;
@SuppressWarnings("deprecation")
private static final float DEFAULT_MIN_SIMILARITY = FuzzyQuery.defaultMinSimilarity;
private static final int DEFAULT_PREFIX_LENGTH = 1;
private static final boolean DEFAULT_IGNORE_TF = false;

@ -27,9 +27,8 @@ import org.apache.lucene.queryparser.flexible.core.util.UnescapedCharSequence;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;

@SuppressWarnings("deprecation")
public class TestQueryTreeBuilder extends LuceneTestCase {


@Test
public void testSetFieldBuilder() throws QueryNodeException {
QueryTreeBuilder qtb = new QueryTreeBuilder();

@ -51,7 +51,6 @@ public abstract class ReplicatorTestCase extends LuceneTestCase {
* Returns a new {@link Server HTTP Server} instance. To obtain its port, use
* {@link #serverPort(Server)}.
*/
@SuppressWarnings("deprecation")
public static synchronized Server newHttpServer(Handler handler) throws Exception {
// if this property is true, then jetty will be configured to use SSL
// leveraging the same system properties as java to specify

@ -114,7 +114,6 @@ public class SimpleWKTShapeParser {
}

/** Parses a list of points into latitude and longitude arraylists */
@SuppressWarnings("rawtypes")
private static void parseCoordinates(StreamTokenizer stream, ArrayList lats, ArrayList lons)
throws IOException, ParseException {
boolean isOpenParen = false;
@ -138,7 +137,6 @@ public class SimpleWKTShapeParser {
}

/** parses a single coordinate, w/ optional 3rd dimension */
@SuppressWarnings({"unchecked", "rawtypes"})
private static void parseCoordinate(StreamTokenizer stream, ArrayList lats, ArrayList lons)
throws IOException, ParseException {
lons.add(nextNumber(stream));
@ -154,8 +152,8 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
ArrayList<Double> lats = new ArrayList<>();
ArrayList<Double> lons = new ArrayList<>();
ArrayList<Double> lats = new ArrayList();
ArrayList<Double> lons = new ArrayList();
parseCoordinates(stream, lats, lons);
double[][] result = new double[lats.size()][2];
for (int i = 0; i < lats.size(); ++i) {
@ -170,8 +168,8 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
ArrayList<Double> lats = new ArrayList<>();
ArrayList<Double> lons = new ArrayList<>();
ArrayList<Double> lats = new ArrayList();
ArrayList<Double> lons = new ArrayList();
parseCoordinates(stream, lats, lons);
return new Line(lats.stream().mapToDouble(i->i).toArray(), lons.stream().mapToDouble(i->i).toArray());
}
@ -182,7 +180,7 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
ArrayList<Line> lines = new ArrayList<>();
ArrayList<Line> lines = new ArrayList();
lines.add(parseLine(stream));
while (nextCloserOrComma(stream).equals(COMMA)) {
lines.add(parseLine(stream));
@ -191,7 +189,6 @@ public class SimpleWKTShapeParser {
}

/** parses the hole of a polygon */
@SuppressWarnings({"rawtypes", "unchecked"})
private static Polygon parsePolygonHole(StreamTokenizer stream) throws IOException, ParseException {
ArrayList<Double> lats = new ArrayList();
ArrayList<Double> lons = new ArrayList();
@ -200,7 +197,6 @@ public class SimpleWKTShapeParser {
}

/** parses a POLYGON */
@SuppressWarnings({"rawtypes", "unchecked"})
private static Polygon parsePolygon(StreamTokenizer stream) throws IOException, ParseException {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return null;
@ -226,7 +222,7 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
ArrayList<Polygon> polygons = new ArrayList<>();
ArrayList<Polygon> polygons = new ArrayList();
polygons.add(parsePolygon(stream));
while (nextCloserOrComma(stream).equals(COMMA)) {
polygons.add(parsePolygon(stream));
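(Illustrative aside, not part of this commit.) The SimpleWKTShapeParser hunks toggle between diamond allocations (new ArrayList<>()) and raw ones (new ArrayList()). A raw constructor assigned to a parameterized variable still compiles, but with an unchecked-conversion warning, which is what the surrounding @SuppressWarnings annotations silence. A minimal sketch:

    import java.util.ArrayList;

    class RawListSketch {
      static void sketch() {
        ArrayList<Double> typed = new ArrayList<>(); // diamond: type argument inferred, no warning
        @SuppressWarnings({"rawtypes", "unchecked"})
        ArrayList<Double> raw = new ArrayList();     // raw constructor: unchecked conversion
        typed.add(1.0);
        raw.add(2.0);
      }
    }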
@ -133,7 +133,6 @@ public abstract class SpatialStrategy {
* scores will be 1 for indexed points at the center of the query shape and as
* low as ~0.1 at its furthest edges.
*/
@SuppressWarnings("deprecation")
public final DoubleValuesSource makeRecipDistanceValueSource(Shape queryShape) {
Rectangle bbox = queryShape.getBoundingBox();
double diagonalDist = ctx.getDistCalc().distance(

@ -45,7 +45,6 @@ class BBoxValueSource extends ShapeValuesSource {
return "bboxShape(" + strategy.getFieldName() + ")";
}

@SuppressWarnings("deprecation")
@Override
public ShapeValues getValues(LeafReaderContext readerContext) throws IOException {
LeafReader reader = readerContext.reader();

@ -88,7 +88,6 @@ public class HeatmapFacetCounter {
* @param facetLevel the target depth (detail) of cells.
* @param maxCells the maximum number of cells to return. If the cells exceed this count, an
*/
@SuppressWarnings("deprecation")
public static Heatmap calcFacets(PrefixTreeStrategy strategy, IndexReaderContext context, Bits topAcceptDocs,
Shape inputShape, final int facetLevel, int maxCells) throws IOException {
if (maxCells > (MAX_ROWS_OR_COLUMNS * MAX_ROWS_OR_COLUMNS)) {

@ -102,7 +102,6 @@ public class WithinPrefixTreeQuery extends AbstractVisitingPrefixTreeQuery {
/** Returns a new shape that is larger than shape by at distErr.
*/
//TODO move this generic code elsewhere? Spatial4j?
@SuppressWarnings("deprecation")
protected Shape bufferShape(Shape shape, double distErr) {
if (distErr <= 0)
throw new IllegalArgumentException("distErr must be > 0");

@ -84,7 +84,6 @@ public class PackedQuadPrefixTree extends QuadPrefixTree {
return new PackedQuadCell(0x0L);
}

@SuppressWarnings("deprecation")
@Override
public Cell getCell(Point p, int level) {
if (!robust) { // old method
@ -145,7 +144,6 @@ public class PackedQuadPrefixTree extends QuadPrefixTree {
checkBattenbergNotRobustly(QUAD[3], x + w, y - h, level, matches, term, shape, maxLevel);
}

@SuppressWarnings("deprecation")
protected void checkBattenbergNotRobustly(byte quad, double cx, double cy, int level, List<Cell> matches,
long term, Shape shape, int maxLevel) {
// short-circuit if we find a match for the point (no need to continue recursion)

@ -138,7 +138,6 @@ public class QuadPrefixTree extends LegacyPrefixTree {
return maxLevels;
}

@SuppressWarnings("deprecation")
@Override
public Cell getCell(Point p, int level) {
if (!robust) { // old method
@ -209,7 +208,6 @@ public class QuadPrefixTree extends LegacyPrefixTree {
// if we actually use the range property in the query, this could be useful
}

@SuppressWarnings("deprecation")
protected void checkBattenbergNotRobustly(
char c,
double cx,
@ -323,7 +321,6 @@ public class QuadPrefixTree extends LegacyPrefixTree {
return shape;
}

@SuppressWarnings("deprecation")
protected Rectangle makeShape() {
BytesRef token = getTokenBytesNoLeaf(null);
double xmin = QuadPrefixTree.this.xmin;

@ -112,7 +112,6 @@ public class SpatialArgsParser {
args.setDistErr(readDouble(nameValPairs.remove(DIST_ERR)));
}

@SuppressWarnings("deprecation")
protected Shape parseShape(String str, SpatialContext ctx) throws ParseException {
//return ctx.readShape(str);//still in Spatial4j 0.4 but will be deleted
return ctx.readShapeFromWkt(str);

@ -66,6 +66,7 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
private static final double DEFAULT_CIRCLE_ACCURACY = 1e-4;
private double circleAccuracy = DEFAULT_CIRCLE_ACCURACY;

@SuppressWarnings("unchecked")
public Geo3dShapeFactory(SpatialContext context, SpatialContextFactory factory) {
this.context = context;
this.planetModel = ((Geo3dSpatialContextFactory) factory).planetModel;
@ -196,7 +197,6 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
}

@Override
@Deprecated // use a builder
public Shape lineString(List<Point> list, double distance) {
LineStringBuilder builder = lineString();
for (Point point : list) {
@ -207,7 +207,6 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
}

@Override
@Deprecated // use a builder
public <S extends Shape> ShapeCollection<S> multiShape(List<S> list) {
throw new UnsupportedOperationException();
}
@ -327,6 +326,7 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
}
}

@SuppressWarnings("unchecked")
@Override
public Shape build() {
GeoPolygonFactory.PolygonDescription description = new GeoPolygonFactory.PolygonDescription(points, polyHoles);
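(Illustrative aside, not part of this commit.) The Geo3dShapeFactory hunk above marks lineString(List<Point>, double) as @Deprecated with the hint "use a builder". A hedged sketch of the fluent alternative, assuming Spatial4j's ShapeFactory.LineStringBuilder API (pointXY and build); the coordinates are arbitrary:

    import org.locationtech.spatial4j.shape.Shape;
    import org.locationtech.spatial4j.shape.ShapeFactory;

    class LineStringSketch {
      static Shape viaBuilder(ShapeFactory factory) {
        return factory.lineString()
            .pointXY(-77.03, 38.90)  // x = longitude, y = latitude
            .pointXY(-77.00, 38.92)
            .build();
      }
    }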
@ -36,7 +36,6 @@ import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Shape;

@SuppressWarnings("deprecation")
public class DistanceStrategyTest extends StrategyTestCase {
@ParametersFactory(argumentFormatting = "strategy=%s")
public static Iterable<Object[]> parameters() {

@ -40,7 +40,6 @@ import org.locationtech.spatial4j.shape.Shape;
/**
* Based off of Solr 3's SpatialFilterTest.
*/
@SuppressWarnings("deprecation")
public class PortedSolr3Test extends StrategyTestCase {

@ParametersFactory(argumentFormatting = "strategy=%s")

@ -102,13 +102,11 @@ public class QueryEqualsHashCodeTest extends LuceneTestCase {
assertTrue(first.hashCode() != second.hashCode());
}

@SuppressWarnings("deprecation")
private SpatialArgs makeArgs1() {
final Shape shape1 = ctx.makeRectangle(0, 0, 10, 10);
return new SpatialArgs(predicate, shape1);
}

@SuppressWarnings("deprecation")
private SpatialArgs makeArgs2() {
final Shape shape2 = ctx.makeRectangle(0, 0, 20, 20);
return new SpatialArgs(predicate, shape2);

@ -23,7 +23,6 @@ import org.junit.Test;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Shape;

@SuppressWarnings("deprecation")
public class SpatialArgsTest extends LuceneTestCase {

@Test

@ -53,7 +53,6 @@ import org.locationtech.spatial4j.shape.Shape;
* This class serves as example code to show how to use the Lucene spatial
* module.
*/
@SuppressWarnings("deprecation")
public class SpatialExample extends LuceneTestCase {

//Note: Test invoked via TestTestFramework.spatialExample()

@ -119,7 +119,6 @@ public abstract class SpatialTestCase extends LuceneTestCase {
}
}

@SuppressWarnings("deprecation")
protected Point randomPoint() {
final Rectangle WB = ctx.getWorldBounds();
return ctx.makePoint(
@ -131,7 +130,6 @@ public abstract class SpatialTestCase extends LuceneTestCase {
return randomRectangle(ctx.getWorldBounds());
}

@SuppressWarnings("deprecation")
protected Rectangle randomRectangle(Rectangle bounds) {
double[] xNewStartAndWidth = randomSubRange(bounds.getMinX(), bounds.getWidth());
double xMin = xNewStartAndWidth[0];
@ -149,7 +147,6 @@ public abstract class SpatialTestCase extends LuceneTestCase {
}

/** Returns new minStart and new length that is inside the range specified by the arguments. */
@SuppressWarnings("deprecation")
protected double[] randomSubRange(double boundStart, double boundLen) {
if (boundLen >= 3 && usually()) { // typical
// prefer integers for ease of debugability ... and prefer 1/16th of bound

@ -41,7 +41,6 @@ public class SpatialTestData {
* an "id", a "name" and the "shape". Empty lines and lines starting with a '#' are skipped.
* The stream is closed.
*/
@SuppressWarnings("deprecation")
public static Iterator<SpatialTestData> getTestData(InputStream in, SpatialContext ctx) throws IOException {
List<SpatialTestData> results = new ArrayList<>();
BufferedReader bufInput = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));

@ -184,7 +184,6 @@ public abstract class StrategyTestCase extends SpatialTestCase {
return strategy.makeQuery(q.args);
}

@SuppressWarnings("deprecation")
protected void adoc(String id, String shapeStr) throws IOException, ParseException {
Shape shape = shapeStr==null ? null : ctx.readShapeFromWkt(shapeStr);
addDocument(newDoc(id, shape));

@ -36,7 +36,6 @@ import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.impl.RectangleImpl;

@SuppressWarnings("deprecation")
public class TestBBoxStrategy extends RandomSpatialOpStrategyTestCase {

@Override

@ -116,7 +116,6 @@ public class CompositeStrategyTest extends RandomSpatialOpStrategyTestCase {
}

//TODO move up
@SuppressWarnings("deprecation")
private Shape randomCircle() {
final Point point = randomPoint();
//TODO pick using gaussian

@ -38,7 +38,6 @@ public class DateNRStrategyTest extends RandomSpatialOpStrategyTestCase {

long randomCalWindowMs;

@SuppressWarnings("deprecation")
@Before
public void setUp() throws Exception {
super.setUp();

@ -43,7 +43,6 @@ import org.locationtech.spatial4j.shape.impl.RectangleImpl;
import static com.carrotsearch.randomizedtesting.RandomizedTest.atMost;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;

@SuppressWarnings("deprecation")
public class HeatmapFacetCounterTest extends StrategyTestCase {

SpatialPrefixTree grid;

@ -37,7 +37,6 @@ import org.junit.Test;
import java.text.ParseException;
import java.util.HashMap;

@SuppressWarnings("deprecation")
public class JtsPolygonTest extends StrategyTestCase {

private static final double LUCENE_4464_distErrPct = SpatialArgs.DEFAULT_DISTERRPCT;//DEFAULT 2.5%

@ -45,7 +45,6 @@ import org.locationtech.spatial4j.shape.Shape;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;

@SuppressWarnings("deprecation")
public class NumberRangeFacetsTest extends StrategyTestCase {

DateRangePrefixTree tree;

@ -63,7 +63,6 @@ import static org.locationtech.spatial4j.shape.SpatialRelation.WITHIN;

/** Randomized PrefixTree test that considers the fuzziness of the
* results introduced by grid approximation. */
@SuppressWarnings("deprecation")
public class RandomSpatialOpFuzzyPrefixTreeTest extends StrategyTestCase {

static final int ITERATIONS = 10;

@ -34,7 +34,6 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween
/** Base test harness, ideally for SpatialStrategy impls that have exact results
* (not grid approximated), hence "not fuzzy".
*/
@SuppressWarnings("deprecation")
public abstract class RandomSpatialOpStrategyTestCase extends StrategyTestCase {

//Note: this is partially redundant with StrategyTestCase.runTestQuery & testOperation

@ -31,7 +31,6 @@ import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

@SuppressWarnings("deprecation")
public class TestRecursivePrefixTreeStrategy extends StrategyTestCase {

private int maxLength;

@ -30,9 +30,9 @@ import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;


public class TestTermQueryPrefixGridStrategy extends SpatialTestCase {

@SuppressWarnings("deprecation")
@Test
public void testNGramPrefixGridLosAngeles() throws IOException {
SpatialContext ctx = SpatialContext.GEO;

@ -79,7 +79,6 @@ public class SpatialPrefixTreeTest extends SpatialTestCase {
* A PrefixTree pruning optimization gone bad, applicable when optimize=true.
* See <a href="https://issues.apache.org/jira/browse/LUCENE-4770">LUCENE-4770</a>.
*/
@SuppressWarnings("deprecation")
@Test
public void testBadPrefixTreePrune() throws Exception {


@ -46,7 +46,6 @@ import org.locationtech.spatial4j.shape.Shape;

import static org.locationtech.spatial4j.distance.DistanceUtils.DEGREES_TO_RADIANS;

@SuppressWarnings("deprecation")
public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {

private PlanetModel planetModel;

@ -66,7 +66,6 @@ public class Geo3dShapeSphereModelRectRelationTest extends ShapeRectRelationTest

}

@SuppressWarnings({"unchecked", "rawtypes", "deprecation"})
@Test
public void testFailure2_LUCENE6475() {
GeoCircle geo3dCircle = GeoCircleFactory.makeGeoCircle(planetModel, 1.6282053147165243E-4 * RADIANS_PER_DEGREE,

@ -36,7 +36,6 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.*;
* A base test class with utility methods to help test shapes.
* Extends from RandomizedTest.
*/
@SuppressWarnings("deprecation")
public abstract class RandomizedShapeTestCase extends LuceneTestCase {

protected static final double EPS = 10e-9;
@ -56,6 +55,7 @@ public abstract class RandomizedShapeTestCase extends LuceneTestCase {
this.ctx = ctx;
}

@SuppressWarnings("unchecked")
public static void checkShapesImplementEquals( Class<?>[] classes ) {
for( Class<?> clazz : classes ) {
try {

@ -33,7 +33,6 @@ import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Circle;
import org.locationtech.spatial4j.shape.Point;

@SuppressWarnings("deprecation")
public class TestPointVectorStrategy extends StrategyTestCase {

@Before
@ -23,6 +23,7 @@ import java.util.Set;

import org.apache.lucene.search.suggest.InputIterator;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.Accountable;
@ -38,7 +39,6 @@ import org.apache.lucene.util.CharsRefBuilder;
* @deprecated Migrate to one of the newer suggesters which are much more RAM efficient.
*/
@Deprecated
@SuppressWarnings("deprecation")
public class JaspellLookup extends Lookup implements Accountable {
JaspellTernarySearchTrie trie = new JaspellTernarySearchTrie();
private boolean usePrefix = true;
@ -140,52 +140,52 @@ public class JaspellLookup extends Lookup implements Accountable {
private static final byte HI_KID = 0x04;
private static final byte HAS_VALUE = 0x08;

private void readRecursively(DataInput in, org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode node) throws IOException {
private void readRecursively(DataInput in, TSTNode node) throws IOException {
node.splitchar = in.readString().charAt(0);
byte mask = in.readByte();
if ((mask & HAS_VALUE) != 0) {
node.data = Long.valueOf(in.readLong());
}
if ((mask & LO_KID) != 0) {
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode kid = new org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode('\0', node);
node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.LOKID] = kid;
TSTNode kid = new TSTNode('\0', node);
node.relatives[TSTNode.LOKID] = kid;
readRecursively(in, kid);
}
if ((mask & EQ_KID) != 0) {
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode kid = new org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode('\0', node);
node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.EQKID] = kid;
TSTNode kid = new TSTNode('\0', node);
node.relatives[TSTNode.EQKID] = kid;
readRecursively(in, kid);
}
if ((mask & HI_KID) != 0) {
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode kid = new org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode('\0', node);
node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.HIKID] = kid;
TSTNode kid = new TSTNode('\0', node);
node.relatives[TSTNode.HIKID] = kid;
readRecursively(in, kid);
}
}

private void writeRecursively(DataOutput out, org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode node) throws IOException {
private void writeRecursively(DataOutput out, TSTNode node) throws IOException {
if (node == null) {
return;
}
out.writeString(new String(new char[] {node.splitchar}, 0, 1));
byte mask = 0;
if (node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.LOKID] != null) mask |= LO_KID;
if (node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.EQKID] != null) mask |= EQ_KID;
if (node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.HIKID] != null) mask |= HI_KID;
if (node.relatives[TSTNode.LOKID] != null) mask |= LO_KID;
if (node.relatives[TSTNode.EQKID] != null) mask |= EQ_KID;
if (node.relatives[TSTNode.HIKID] != null) mask |= HI_KID;
if (node.data != null) mask |= HAS_VALUE;
out.writeByte(mask);
if (node.data != null) {
out.writeLong(((Number)node.data).longValue());
}
writeRecursively(out, node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.LOKID]);
writeRecursively(out, node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.EQKID]);
writeRecursively(out, node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.HIKID]);
writeRecursively(out, node.relatives[TSTNode.LOKID]);
writeRecursively(out, node.relatives[TSTNode.EQKID]);
writeRecursively(out, node.relatives[TSTNode.HIKID]);
}

@Override
public boolean store(DataOutput output) throws IOException {
output.writeVLong(count);
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode root = trie.getRoot();
TSTNode root = trie.getRoot();
if (root == null) { // empty tree
return false;
}
@ -196,7 +196,7 @@ public class JaspellLookup extends Lookup implements Accountable {
@Override
public boolean load(DataInput input) throws IOException {
count = input.readVLong();
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode root = new org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode('\0', null);
TSTNode root = new TSTNode('\0', null);
readRecursively(input, root);
trie.setRoot(root);
return true;
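(Illustrative aside, not part of this commit.) The JaspellLookup hunk is largely a mechanical rename, importing TSTNode instead of spelling out the fully qualified class name, but the serialization it touches has a compact scheme: each ternary-trie node is written as its split character, one byte of presence flags, an optional long payload, then its LO/EQ/HI children in order. A self-contained sketch of the flag byte; the LO_KID and EQ_KID values are assumed by analogy, only HI_KID and HAS_VALUE appear in the hunk:

    class NodeMaskSketch {
      static final byte LO_KID = 0x01;    // assumed
      static final byte EQ_KID = 0x02;    // assumed
      static final byte HI_KID = 0x04;    // from the hunk
      static final byte HAS_VALUE = 0x08; // from the hunk

      static byte maskFor(boolean lo, boolean eq, boolean hi, boolean hasValue) {
        byte mask = 0;
        if (lo) mask |= LO_KID;
        if (eq) mask |= EQ_KID;
        if (hi) mask |= HI_KID;
        if (hasValue) mask |= HAS_VALUE;
        return mask;
      }
    }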
@ -38,7 +38,6 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@SuppressWarnings("deprecation")
public class TestWordBreakSpellChecker extends LuceneTestCase {
private Directory dir;
private Analyzer analyzer;

@ -40,6 +40,7 @@ import org.apache.lucene.search.suggest.analyzing.FreeTextSuggester;
import org.apache.lucene.search.suggest.analyzing.FuzzySuggester;
import org.apache.lucene.search.suggest.fst.FSTCompletionLookup;
import org.apache.lucene.search.suggest.fst.WFSTCompletionLookup;
import org.apache.lucene.search.suggest.jaspell.JaspellLookup;
import org.apache.lucene.search.suggest.tst.TSTLookup;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
@ -52,12 +53,12 @@ import org.junit.Ignore;
*/
@Ignore("COMMENT ME TO RUN BENCHMARKS!")
public class LookupBenchmarkTest extends LuceneTestCase {
@SuppressWarnings("deprecation")
@SuppressWarnings("unchecked")
private final List<Class<? extends Lookup>> benchmarkClasses = Arrays.asList(
FuzzySuggester.class,
AnalyzingSuggester.class,
AnalyzingInfixSuggester.class,
org.apache.lucene.search.suggest.jaspell.JaspellLookup.class,
JaspellLookup.class,
TSTLookup.class,
FSTCompletionLookup.class,
WFSTCompletionLookup.class,