SOLR-13952: reverting Erick's commit (with permission).

Dawid Weiss 2019-11-25 17:56:20 +01:00
parent b5fd6d7b22
commit 063c82ebd6
296 changed files with 538 additions and 861 deletions

View File

@@ -37,7 +37,6 @@ public class FixBrokenOffsetsFilterFactory extends TokenFilterFactory {
super(args);
}
@SuppressWarnings("deprecation")
@Override
public TokenStream create(TokenStream input) {
return new FixBrokenOffsetsFilter(input);

View File

@@ -33,6 +33,8 @@ import org.apache.lucene.analysis.util.ResourceLoaderAware;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.search.PhraseQuery;
import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.*;
/**
* Factory for {@link WordDelimiterFilter}.
* <pre class="prettyprint">
@@ -74,31 +76,31 @@ public class WordDelimiterFilterFactory extends TokenFilterFactory implements Re
super(args);
int flags = 0;
if (getInt(args, "generateWordParts", 1) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_WORD_PARTS;
flags |= GENERATE_WORD_PARTS;
}
if (getInt(args, "generateNumberParts", 1) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_NUMBER_PARTS;
flags |= GENERATE_NUMBER_PARTS;
}
if (getInt(args, "catenateWords", 0) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_WORDS;
flags |= CATENATE_WORDS;
}
if (getInt(args, "catenateNumbers", 0) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_NUMBERS;
flags |= CATENATE_NUMBERS;
}
if (getInt(args, "catenateAll", 0) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_ALL;
flags |= CATENATE_ALL;
}
if (getInt(args, "splitOnCaseChange", 1) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_CASE_CHANGE;
flags |= SPLIT_ON_CASE_CHANGE;
}
if (getInt(args, "splitOnNumerics", 1) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_NUMERICS;
flags |= SPLIT_ON_NUMERICS;
}
if (getInt(args, "preserveOriginal", 0) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.PRESERVE_ORIGINAL;
flags |= PRESERVE_ORIGINAL;
}
if (getInt(args, "stemEnglishPossessive", 1) != 0) {
flags |= org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE;
flags |= STEM_ENGLISH_POSSESSIVE;
}
wordFiles = get(args, PROTECTED_TOKENS);
types = get(args, TYPES);
@@ -160,17 +162,17 @@ public class WordDelimiterFilterFactory extends TokenFilterFactory implements Re
private Byte parseType(String s) {
if (s.equals("LOWER"))
return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.LOWER;
return LOWER;
else if (s.equals("UPPER"))
return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.UPPER;
return UPPER;
else if (s.equals("ALPHA"))
return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.ALPHA;
return ALPHA;
else if (s.equals("DIGIT"))
return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.DIGIT;
return DIGIT;
else if (s.equals("ALPHANUM"))
return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.ALPHANUM;
return ALPHANUM;
else if (s.equals("SUBWORD_DELIM"))
return org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SUBWORD_DELIM;
return SUBWORD_DELIM;
else
return null;
}
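The flag-handling lines above are the heart of this revert: the reverted commit had expanded each statically imported constant into a fully qualified reference, so the revert restores the wildcard static import and the short names. A minimal sketch of the restored idiom, assuming only the Lucene class and constants named in the hunk (the factory plumbing is elided):

import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.*;

class FlagSketch {
  // Flags accumulate by OR-ing the statically imported int constants,
  // e.g. GENERATE_WORD_PARTS rather than WordDelimiterFilter.GENERATE_WORD_PARTS.
  static int defaultFlags() {
    int flags = 0;
    flags |= GENERATE_WORD_PARTS;
    flags |= GENERATE_NUMBER_PARTS;
    flags |= SPLIT_ON_CASE_CHANGE;
    return flags;
  }
}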

View File

@@ -210,7 +210,6 @@ public class SynonymMap {
/**
* Builds an {@link SynonymMap} and returns it.
*/
@SuppressWarnings("deprecation")
public SynonymMap build() throws IOException {
ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton();
// TODO: are we using the best sharing options?

View File

@@ -30,7 +30,6 @@ import org.apache.lucene.analysis.util.BaseTokenStreamFactoryTestCase;
*/
public class TestSoraniStemFilterFactory extends BaseTokenStreamFactoryTestCase {
@SuppressWarnings("resource")
public void testStemming() throws Exception {
Reader reader = new StringReader("پیاوەکان");
TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false);

View File

@@ -1,22 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE hyphenation-info SYSTEM "hyphenation.dtd">
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
Copyright 1999-2004 The Apache Software Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!--
This file contains the hyphenation patterns for danish.

View File

@@ -1,22 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE hyphenation-info SYSTEM "hyphenation.dtd">
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!--
This file contains the hyphenation patterns for danish.

View File

@@ -1,21 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
Copyright 1999-2004 The Apache Software Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- $Id: hyphenation.dtd,v 1.3 2004/02/27 18:34:59 jeremias Exp $ -->

View File

@@ -257,7 +257,6 @@ public class TestBugInSomething extends BaseTokenStreamTestCase {
-119, 0, 92, 94, -36, 53, -9, -102, -18, 90, 94, -26, 31, 71, -20
};
Analyzer a = new Analyzer() {
@SuppressWarnings("deprecation")
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new WikipediaTokenizer();

View File

@@ -82,6 +82,7 @@ import org.apache.lucene.analysis.miscellaneous.LimitTokenOffsetFilter;
import org.apache.lucene.analysis.miscellaneous.LimitTokenPositionFilter;
import org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilter;
import org.apache.lucene.analysis.miscellaneous.StemmerOverrideFilter.StemmerOverrideMap;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter;
import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
import org.apache.lucene.analysis.path.ReversePathHierarchyTokenizer;
@@ -111,7 +112,6 @@ import org.tartarus.snowball.SnowballProgram;
import org.xml.sax.InputSource;
/** tests random analysis chains */
@SuppressWarnings("deprecation")
public class TestRandomChains extends BaseTokenStreamTestCase {
static List<Constructor<? extends Tokenizer>> tokenizers;
@@ -193,7 +193,7 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
// TODO: it seems to mess up offsets!?
WikipediaTokenizer.class,
// TODO: needs to be a tokenizer, doesnt handle graph inputs properly (a shingle or similar following will then cause pain)
org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.class,
WordDelimiterFilter.class,
// Cannot correct offsets when a char filter had changed them:
WordDelimiterGraphFilter.class,
// requires a special encoded token value, so it may fail with random data:
@@ -321,7 +321,6 @@ public class TestRandomChains extends BaseTokenStreamTestCase {
}
}
@SuppressWarnings("serial")
private static final Map<Class<?>,Function<Random,Object>> argProducers = new IdentityHashMap<Class<?>,Function<Random,Object>>() {{
put(int.class, random -> {
// TODO: could cause huge ram usage to use full int range for some filters

View File

@@ -47,7 +47,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SetOnce.AlreadySetException;
import org.apache.lucene.util.Version;
@SuppressWarnings("deprecation")
public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
// Test some examples (TODO: we only check behavior, we may need something like TestRandomChains...)

View File

@@ -25,6 +25,7 @@ import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.synonym.SynonymFilter;
import org.apache.lucene.analysis.synonym.SynonymGraphFilter;
import org.apache.lucene.analysis.synonym.SynonymMap;
import org.apache.lucene.util.CharsRef;
@@ -68,19 +69,17 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
assertTokenStreamContents(stream, new String[]{builder.toCharsRef().toString()}, null, null, new int[]{1});
}
@SuppressWarnings("deprecation")
@Test
public void testWithSynonym() throws Exception {
SynonymMap.Builder builder = new SynonymMap.Builder(true);
builder.add(new CharsRef("mykeyword"), new CharsRef("mysynonym"), true);
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
tokenizer.setReader(new StringReader("mykeyword"));
org.apache.lucene.analysis.synonym.SynonymFilter filter = new org.apache.lucene.analysis.synonym.SynonymFilter(tokenizer, builder.build(), true);
SynonymFilter filter = new SynonymFilter(tokenizer, builder.build(), true);
ConcatenateGraphFilter stream = new ConcatenateGraphFilter(filter);
assertTokenStreamContents(stream, new String[] {"mykeyword", "mysynonym"}, null, null, new int[] { 1, 0 });
}
@SuppressWarnings("deprecation")
@Test
public void testWithSynonyms() throws Exception {
SynonymMap.Builder builder = new SynonymMap.Builder(true);
@@ -88,7 +87,7 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
Tokenizer tokenStream = new MockTokenizer(MockTokenizer.WHITESPACE, true);
String input = "mykeyword another keyword";
tokenStream.setReader(new StringReader(input));
org.apache.lucene.analysis.synonym.SynonymFilter filter = new org.apache.lucene.analysis.synonym.SynonymFilter(tokenStream, builder.build(), true);
SynonymFilter filter = new SynonymFilter(tokenStream, builder.build(), true);
ConcatenateGraphFilter stream = new ConcatenateGraphFilter(filter, SEP_LABEL, false, 100);
String[] expectedOutputs = new String[2];
CharsRefBuilder expectedOutput = new CharsRefBuilder();
@@ -133,7 +132,6 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
}
}
@SuppressWarnings("deprecation")
@Test
public void testValidNumberOfExpansions() throws IOException {
SynonymMap.Builder builder = new SynonymMap.Builder(true);
@@ -147,7 +145,7 @@ public class TestConcatenateGraphFilter extends BaseTokenStreamTestCase {
}
MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
tokenizer.setReader(new StringReader(valueBuilder.toString()));
org.apache.lucene.analysis.synonym.SynonymFilter filter = new org.apache.lucene.analysis.synonym.SynonymFilter(tokenizer, builder.build(), true);
SynonymFilter filter = new SynonymFilter(tokenizer, builder.build(), true);
int count;
try (ConcatenateGraphFilter stream = new ConcatenateGraphFilter(filter)) {
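The deprecation suppressions removed throughout this file concern SynonymFilter, which Lucene deprecated in favor of SynonymGraphFilter. As a hedged sketch (not part of this commit), the graph-based equivalent of the testWithSynonym setup above differs only in the filter swap:

import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.synonym.SynonymGraphFilter;
import org.apache.lucene.analysis.synonym.SynonymMap;
import org.apache.lucene.util.CharsRef;

class SynonymGraphSketch {
  static TokenStream makeStream() throws IOException {
    // Same one-entry map as testWithSynonym above.
    SynonymMap.Builder builder = new SynonymMap.Builder(true);
    builder.add(new CharsRef("mykeyword"), new CharsRef("mysynonym"), true);
    Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
    tokenizer.setReader(new StringReader("mykeyword"));
    // Same (input, map, ignoreCase) constructor shape as the deprecated
    // SynonymFilter, but it emits a correct token graph.
    return new SynonymGraphFilter(tokenizer, builder.build(), true);
  }
}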

View File

@@ -31,7 +31,6 @@ import org.apache.lucene.store.Directory;
public class TestFixBrokenOffsetsFilter extends BaseTokenStreamTestCase {
@SuppressWarnings("deprecation")
public void testBogusTermVectors() throws IOException {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));

View File

@@ -20,6 +20,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.synonym.SynonymFilter;
import org.apache.lucene.analysis.synonym.SynonymMap;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
@@ -59,7 +60,6 @@ public class TestLimitTokenPositionFilter extends BaseTokenStreamTestCase {
}
}
@SuppressWarnings("deprecation")
public void testMaxPosition3WithSynomyms() throws IOException {
for (final boolean consumeAll : new boolean[]{true, false}) {
MockTokenizer tokenizer = whitespaceMockTokenizer("one two three four five");
@@ -76,7 +76,7 @@ public class TestLimitTokenPositionFilter extends BaseTokenStreamTestCase {
SynonymMap.Builder.join(new String[]{"dopple", "ganger"}, multiWordCharsRef);
builder.add(new CharsRef("two"), multiWordCharsRef.get(), true);
SynonymMap synonymMap = builder.build();
TokenStream stream = new org.apache.lucene.analysis.synonym.SynonymFilter(tokenizer, synonymMap, true);
TokenStream stream = new SynonymFilter(tokenizer, synonymMap, true);
stream = new LimitTokenPositionFilter(stream, 3, consumeAll);
// "only", the 4th word of multi-word synonym "and indubitably single only" is not emitted, since its position is greater than 3.

View File

@@ -156,7 +156,6 @@ public class TestRemoveDuplicatesTokenFilter extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
@SuppressWarnings("deprecation")
TokenStream stream = new SynonymFilter(tokenizer, map, ignoreCase);
return new TokenStreamComponents(tokenizer, new RemoveDuplicatesTokenFilter(stream));
}

View File

@@ -54,7 +54,6 @@ import static org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator.DEF
* TODO: should explicitly test things like protWords and not rely on
* the factory tests in Solr.
*/
@SuppressWarnings("deprecation")
public class TestWordDelimiterFilter extends BaseTokenStreamTestCase {
/*

View File

@@ -119,7 +119,6 @@ public class EdgeNGramTokenizerTest extends BaseTokenStreamTestCase {
false);
}
@SuppressWarnings("deprecation")
private static void testNGrams(int minGram, int maxGram, int length, final String nonTokenChars) throws IOException {
final String s = RandomStrings.randomAsciiOfLength(random(), length);
testNGrams(minGram, maxGram, s, nonTokenChars);

View File

@@ -119,7 +119,6 @@ public class NGramTokenizerTest extends BaseTokenStreamTestCase {
}
}
@SuppressWarnings("deprecation")
private static void testNGrams(int minGram, int maxGram, int length, final String nonTokenChars) throws IOException {
final String s = RandomStrings.randomAsciiOfLength(random(), length);
testNGrams(minGram, maxGram, s, nonTokenChars);

View File

@@ -47,7 +47,6 @@ public class TestSolrSynonymParser extends BaseSynonymParserTestCase {
analyzer.close();
analyzer = new Analyzer() {
@SuppressWarnings("deprecation")
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
@@ -139,7 +138,6 @@ public class TestSolrSynonymParser extends BaseSynonymParserTestCase {
final SynonymMap map = parser.build();
analyzer.close();
analyzer = new Analyzer() {
@SuppressWarnings("deprecation")
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.KEYWORD, false);
@@ -173,7 +171,6 @@ public class TestSolrSynonymParser extends BaseSynonymParserTestCase {
analyzer.close();
analyzer = new Analyzer() {
@SuppressWarnings("deprecation")
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);

View File

@@ -30,7 +30,6 @@ import org.apache.lucene.util.Version;
public class TestSynonymFilterFactory extends BaseTokenStreamFactoryTestCase {
/** checks for synonyms of "GB" in synonyms.txt */
@SuppressWarnings("deprecation")
private void checkSolrSynonyms(TokenFilterFactory factory) throws Exception {
Reader reader = new StringReader("GB");
TokenStream stream = whitespaceMockTokenizer(reader);
@@ -42,7 +41,6 @@ public class TestSynonymFilterFactory extends BaseTokenStreamFactoryTestCase {
}
/** checks for synonyms of "second" in synonyms-wordnet.txt */
@SuppressWarnings("deprecation")
private void checkWordnetSynonyms(TokenFilterFactory factory) throws Exception {
Reader reader = new StringReader("second");
TokenStream stream = whitespaceMockTokenizer(reader);

View File

@@ -39,7 +39,6 @@ import org.apache.lucene.analysis.tokenattributes.*;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.TestUtil;
@SuppressWarnings("deprecation")
public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
private SynonymMap.Builder b;

View File

@@ -45,7 +45,6 @@ public class TestWordnetSynonymParser extends BaseTokenStreamTestCase {
analyzer.close();
analyzer = new Analyzer() {
@SuppressWarnings("deprecation")
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);

View File

@@ -29,7 +29,6 @@ import org.apache.lucene.util.Version;
public class TestAnalysisSPILoader extends LuceneTestCase {
@SuppressWarnings("serial")
private Map<String,String> versionArgOnly() {
return new HashMap<String, String>() {{
put("luceneMatchVersion", Version.LATEST.toString());

View File

@@ -54,7 +54,6 @@ public class TestFilesystemResourceLoader extends LuceneTestCase {
rl.newInstance("org.apache.lucene.analysis.util.RollingCharBuffer", Object.class).getClass().getName());
}
@SuppressWarnings("deprecation")
public void testBaseDir() throws Exception {
final Path base = createTempDir("fsResourceLoaderBase");
Writer os = Files.newBufferedWriter(base.resolve("template.txt"), StandardCharsets.UTF_8);

View File

@@ -73,7 +73,6 @@ public final class ICUTransformFilter extends TokenFilter {
* @param input {@link TokenStream} to filter.
* @param transform Transliterator to transform the text.
*/
@SuppressWarnings("deprecation")
public ICUTransformFilter(TokenStream input, Transliterator transform) {
super(input);
this.transform = transform;

View File

@@ -169,7 +169,6 @@ public class SpatialDocMaker extends DocMaker {
final boolean bbox = config.get(configKeyPrefix + "bbox", false);
return new ShapeConverter() {
@SuppressWarnings("deprecation")
@Override
public Shape convert(Shape shape) {
if (shape instanceof Point && (radiusDegrees != 0.0 || plusMinus != 0.0)) {
@@ -218,7 +217,6 @@ public class SpatialDocMaker extends DocMaker {
return doc;
}
@SuppressWarnings("deprecation")
public static Shape makeShapeFromString(SpatialStrategy strategy, String name, String shapeStr) {
if (shapeStr != null && shapeStr.length() > 0) {
try {

View File

@@ -288,7 +288,7 @@ public class AnalyzerFactoryTask extends PerfTask {
* @param stok stream tokenizer from which to draw analysis factory params
* @param clazz analysis factory class to instantiate
*/
@SuppressWarnings({"fallthrough", "deprecation"})
@SuppressWarnings("fallthrough")
private void createAnalysisPipelineComponent
(StreamTokenizer stok, Class<? extends AbstractAnalysisFactory> clazz) {
Map<String,String> argMap = new HashMap<>();

View File

@@ -33,7 +33,6 @@ import org.apache.lucene.util.SuppressForbidden;
@SuppressForbidden(reason = "Uses a Long instance as a marker")
public final class PositiveIntOutputs extends Outputs<Long> {
@SuppressWarnings("deprecation")
private final static Long NO_OUTPUT = new Long(0);
private final static PositiveIntOutputs singleton = new PositiveIntOutputs();
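Background on why NO_OUTPUT carries both annotations: it is compared by reference identity as a sentinel (the SuppressForbidden note above), so it must be an instance that Long.valueOf can never hand out, and the deprecated constructor is the only way to guarantee that. A small illustration of the distinction; the class name here is illustrative only:

class LongMarkerSketch {
  @SuppressWarnings("deprecation")
  static final Long MARKER = new Long(0); // fresh instance, never == a boxed 0

  public static void main(String[] args) {
    Long boxed = Long.valueOf(0);             // small values come from a shared cache
    System.out.println(boxed.equals(MARKER)); // true: value equality holds
    System.out.println(boxed == MARKER);      // false: identity tells the marker apart
  }
}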

View File

@@ -57,7 +57,6 @@ public class TestCharArraySet extends LuceneTestCase {
assertTrue(set.contains(new String(findme,1,4)));
}
@SuppressWarnings("deprecation")
@SuppressForbidden(reason = "Explicitly checking new Integers")
public void testObjectContains() {
CharArraySet set = new CharArraySet(10, true);
@@ -212,6 +211,7 @@ public class TestCharArraySet extends LuceneTestCase {
}
}
@SuppressWarnings("deprecated")
public void testCopyCharArraySetBWCompat() {
CharArraySet setIngoreCase = new CharArraySet(10, true);
CharArraySet setCaseSensitive = new CharArraySet(10, false);

View File

@@ -212,7 +212,6 @@ public class TestDocument extends LuceneTestCase {
dir.close();
}
@SuppressWarnings("deprecation")
public void testGetValues() {
Document doc = makeDocumentWithFields();
assertEquals(new String[] {"test1", "test2"},

View File

@@ -149,7 +149,6 @@ public class TestFieldUpdatesBuffer extends LuceneTestCase {
assertFalse(buffer.isNumeric());
}
@SuppressWarnings("unchecked")
public <T extends DocValuesUpdate> T getRandomUpdate(boolean binary) {
String termField = RandomPicks.randomFrom(random(), Arrays.asList("id", "_id", "some_other_field"));
String docId = "" + random().nextInt(10);

View File

@@ -33,6 +33,8 @@ import org.junit.Test;
import java.io.IOException;
import junit.framework.Assert;
public class TestMultiTermConstantScore extends BaseTestRangeFilter {
/** threshold for comparing floats */
@@ -41,9 +43,8 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
static Directory small;
static IndexReader reader;
@SuppressWarnings("deprecation")
static public void assertEquals(String m, int e, int a) {
junit.framework.Assert.assertEquals(m, e, a);
Assert.assertEquals(m, e, a);
}
@BeforeClass

View File

@@ -281,7 +281,6 @@ public class TestSearchAfter extends LuceneTestCase {
assertEquals(all.scoreDocs.length, pageStart);
}
@SuppressWarnings("deprecation")
void assertPage(int pageStart, TopDocs all, TopDocs paged) throws IOException {
assertEquals(all.totalHits.value, paged.totalHits.value);
for (int i = 0; i < paged.scoreDocs.length; i++) {

View File

@@ -20,7 +20,6 @@ import java.util.Arrays;
public class TestCharsRef extends LuceneTestCase {
@SuppressWarnings("deprecation")
public void testUTF16InUTF8Order() {
final int numStrings = atLeast(1000);
BytesRef utf8[] = new BytesRef[numStrings];

View File

@@ -23,7 +23,6 @@ import java.text.ParseException;
import java.util.Locale;
import java.util.Random;
@SuppressWarnings("deprecation")
public class TestVersion extends LuceneTestCase {
public void testOnOrAfter() throws Exception {

View File

@@ -0,0 +1,95 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util;
public class TestVirtualMethod extends LuceneTestCase {
private static final VirtualMethod<TestVirtualMethod> publicTestMethod =
new VirtualMethod<>(TestVirtualMethod.class, "publicTest", String.class);
private static final VirtualMethod<TestVirtualMethod> protectedTestMethod =
new VirtualMethod<>(TestVirtualMethod.class, "protectedTest", int.class);
public void publicTest(String test) {}
protected void protectedTest(int test) {}
static class TestClass1 extends TestVirtualMethod {
@Override
public void publicTest(String test) {}
@Override
protected void protectedTest(int test) {}
}
static class TestClass2 extends TestClass1 {
@Override // make it public here
public void protectedTest(int test) {}
}
static class TestClass3 extends TestClass2 {
@Override
public void publicTest(String test) {}
}
static class TestClass4 extends TestVirtualMethod {
}
static class TestClass5 extends TestClass4 {
}
public void testGeneral() {
assertEquals(0, publicTestMethod.getImplementationDistance(this.getClass()));
assertEquals(1, publicTestMethod.getImplementationDistance(TestClass1.class));
assertEquals(1, publicTestMethod.getImplementationDistance(TestClass2.class));
assertEquals(3, publicTestMethod.getImplementationDistance(TestClass3.class));
assertFalse(publicTestMethod.isOverriddenAsOf(TestClass4.class));
assertFalse(publicTestMethod.isOverriddenAsOf(TestClass5.class));
assertEquals(0, protectedTestMethod.getImplementationDistance(this.getClass()));
assertEquals(1, protectedTestMethod.getImplementationDistance(TestClass1.class));
assertEquals(2, protectedTestMethod.getImplementationDistance(TestClass2.class));
assertEquals(2, protectedTestMethod.getImplementationDistance(TestClass3.class));
assertFalse(protectedTestMethod.isOverriddenAsOf(TestClass4.class));
assertFalse(protectedTestMethod.isOverriddenAsOf(TestClass5.class));
assertTrue(VirtualMethod.compareImplementationDistance(TestClass3.class, publicTestMethod, protectedTestMethod) > 0);
assertEquals(0, VirtualMethod.compareImplementationDistance(TestClass5.class, publicTestMethod, protectedTestMethod));
}
@SuppressWarnings({"rawtypes","unchecked"})
public void testExceptions() {
// LuceneTestCase is not a subclass and can never override publicTest(String)
expectThrows(IllegalArgumentException.class, () -> {
// cast to Class to remove generics:
publicTestMethod.getImplementationDistance((Class) LuceneTestCase.class);
});
// Method bogus() does not exist, so IAE should be thrown
expectThrows(IllegalArgumentException.class, () -> {
new VirtualMethod<>(TestVirtualMethod.class, "bogus");
});
// Method publicTest(String) is not declared in TestClass2, so IAE should be thrown
expectThrows(IllegalArgumentException.class, () -> {
new VirtualMethod<>(TestClass2.class, "publicTest", String.class);
});
// try to create a second instance of the same baseClass / method combination
expectThrows(UnsupportedOperationException.class, () -> {
new VirtualMethod<>(TestVirtualMethod.class, "publicTest", String.class);
});
}
}
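VirtualMethod, the utility exercised by this restored test file, lets a base class detect whether a concrete subclass still overrides a legacy method so it can route between old and new code paths. A hedged sketch of that detection pattern, with hypothetical class and method names (only the VirtualMethod API itself is from Lucene):

import org.apache.lucene.util.VirtualMethod;

class LegacyDetectSketch {
  static class Base {
    // One instance per baseClass/method pair, as the
    // UnsupportedOperationException test above enforces.
    private static final VirtualMethod<Base> LEGACY =
        new VirtualMethod<>(Base.class, "doWorkLegacy", String.class);

    // Legacy hook that subclasses may still override.
    protected void doWorkLegacy(String input) {}

    final boolean subclassOverridesLegacy() {
      return LEGACY.isOverriddenAsOf(getClass());
    }
  }
}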

View File

@@ -39,7 +39,6 @@ public class Test2BFST extends LuceneTestCase {
private static long LIMIT = 3L*1024*1024*1024;
@SuppressWarnings("deprecation")
public void test() throws Exception {
assumeWorkingMMapOnWindows();

View File

@@ -567,7 +567,7 @@ public class TestFSTs extends LuceneTestCase {
} else {
// Get by output
final Long output = (Long) getOutput(intsRef.get(), ord);
@SuppressWarnings({"unchecked", "deprecation"}) final IntsRef actual = Util.getByOutput((FST<Long>) fst, output.longValue());
@SuppressWarnings("unchecked") final IntsRef actual = Util.getByOutput((FST<Long>) fst, output.longValue());
if (actual == null) {
throw new RuntimeException("unexpected null input from output=" + output);
}
@@ -787,7 +787,6 @@
}
*/
@SuppressWarnings("deprecation")
public void testSimple() throws Exception {
// Get outputs -- passing true means FST will share

View File

@@ -404,7 +404,6 @@ public class DrillSideways {
}
/** Runs a search, using a {@link CollectorManager} to gather and merge search results */
@SuppressWarnings("unchecked")
public <R> ConcurrentDrillSidewaysResult<R> search(final DrillDownQuery query,
final CollectorManager<?, R> hitCollectorManager) throws IOException {

View File

@@ -287,11 +287,9 @@ public class TestGrouping extends LuceneTestCase {
private Collection<SearchGroup<BytesRef>> getSearchGroups(FirstPassGroupingCollector<?> c, int groupOffset) throws IOException {
if (TermGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
@SuppressWarnings("unchecked")
FirstPassGroupingCollector<BytesRef> collector = (FirstPassGroupingCollector<BytesRef>) c;
return collector.getTopGroups(groupOffset);
} else if (ValueSourceGroupSelector.class.isAssignableFrom(c.getGroupSelector().getClass())) {
@SuppressWarnings("unchecked")
FirstPassGroupingCollector<MutableValue> collector = (FirstPassGroupingCollector<MutableValue>) c;
Collection<SearchGroup<MutableValue>> mutableValueGroups = collector.getTopGroups(groupOffset);
if (mutableValueGroups == null) {
@@ -406,6 +404,7 @@ public class TestGrouping extends LuceneTestCase {
};
}
@SuppressWarnings({"unchecked","rawtypes"})
private Comparable<?>[] fillFields(GroupDoc d, Sort sort) {
final SortField[] sortFields = sort.getSort();
final Comparable<?>[] fields = new Comparable[sortFields.length];
@@ -491,7 +490,7 @@ public class TestGrouping extends LuceneTestCase {
final int limit = Math.min(groupOffset + topNGroups, groups.size());
final Comparator<GroupDoc> docSortComp = getComparator(docSort);
@SuppressWarnings({"unchecked"})
@SuppressWarnings({"unchecked","rawtypes"})
final GroupDocs<BytesRef>[] result = new GroupDocs[limit-groupOffset];
int totalGroupedHitCount = 0;
for(int idx=groupOffset;idx < limit;idx++) {
@@ -1043,7 +1042,7 @@ public class TestGrouping extends LuceneTestCase {
}
// Get block grouping result:
sBlocks.search(query, c4);
@SuppressWarnings({"unchecked"})
@SuppressWarnings({"unchecked","rawtypes"})
final TopGroups<BytesRef> tempTopGroupsBlocks = (TopGroups<BytesRef>) c3.getTopGroups(docSort, groupOffset, docOffset, docOffset+docsPerGroup);
final TopGroups<BytesRef> groupsResultBlocks;
if (doAllGroups && tempTopGroupsBlocks != null) {
@@ -1199,7 +1198,7 @@ public class TestGrouping extends LuceneTestCase {
if (mergedTopGroups != null) {
// Now 2nd pass:
@SuppressWarnings({"unchecked"})
@SuppressWarnings({"unchecked","rawtypes"})
final TopGroups<BytesRef>[] shardTopGroups = new TopGroups[subSearchers.length];
for(int shardIDX=0;shardIDX<subSearchers.length;shardIDX++) {
final TopGroupsCollector<?> secondPassCollector = createSecondPassCollector(firstPassGroupingCollectors.get(shardIDX),

View File

@@ -2005,7 +2005,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
TopDocs hits = searcher.search(query, 10);
assertEquals(1, hits.scoreDocs.length);
@SuppressWarnings("deprecation")
TokenStream stream = TokenSources.getAnyTokenStream(searcher.getIndexReader(), 0, FIELD_NAME, analyzer);
if (random().nextBoolean()) {
stream = new CachingTokenFilter(stream);//conceals detection of TokenStreamFromTermVector

View File

@@ -107,7 +107,6 @@ public class ToParentBlockJoinSortField extends SortField {
private FieldComparator<?> getStringComparator(int numHits) {
return new FieldComparator.TermOrdValComparator(numHits, getField(), missingValue == STRING_LAST) {
@SuppressWarnings("deprecation")
@Override
protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException {
SortedSetDocValues sortedSet = DocValues.getSortedSet(context.reader(), field);
@@ -127,7 +126,6 @@ public class ToParentBlockJoinSortField extends SortField {
private FieldComparator<?> getIntComparator(int numHits) {
return new FieldComparator.IntComparator(numHits, getField(), (Integer) missingValue) {
@SuppressWarnings("deprecation")
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
SortedNumericDocValues sortedNumeric = DocValues.getSortedNumeric(context.reader(), field);
@@ -146,7 +144,6 @@ public class ToParentBlockJoinSortField extends SortField {
private FieldComparator<?> getLongComparator(int numHits) {
return new FieldComparator.LongComparator(numHits, getField(), (Long) missingValue) {
@SuppressWarnings("deprecation")
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
SortedNumericDocValues sortedNumeric = DocValues.getSortedNumeric(context.reader(), field);
@@ -165,7 +162,6 @@ public class ToParentBlockJoinSortField extends SortField {
private FieldComparator<?> getFloatComparator(int numHits) {
return new FieldComparator.FloatComparator(numHits, getField(), (Float) missingValue) {
@SuppressWarnings("deprecation")
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
SortedNumericDocValues sortedNumeric = DocValues.getSortedNumeric(context.reader(), field);
@@ -190,7 +186,6 @@ public class ToParentBlockJoinSortField extends SortField {
private FieldComparator<?> getDoubleComparator(int numHits) {
return new FieldComparator.DoubleComparator(numHits, getField(), (Double) missingValue) {
@SuppressWarnings("deprecation")
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
SortedNumericDocValues sortedNumeric = DocValues.getSortedNumeric(context.reader(), field);

View File

@@ -108,7 +108,6 @@ public class TestBlockJoinSelector extends LuceneTestCase {
: ((NumericDocValues) sdv).advanceExact(target);
}
@SuppressWarnings("deprecation")
public void testSortedSelector() throws IOException {
final BitSet parents = new FixedBitSet(20);
parents.set(0);
@@ -218,7 +217,6 @@ public class TestBlockJoinSelector extends LuceneTestCase {
}
}
@SuppressWarnings("deprecation")
public void testNumericSelector() throws Exception {
final BitSet parents = new FixedBitSet(20);
parents.set(0);

View File

@@ -335,7 +335,6 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
reader.close();
}
@SuppressWarnings("unused")
private Allocator randomByteBlockAllocator() {
if (random().nextBoolean()) {
return new RecyclingByteBlockAllocator();
@@ -378,7 +377,6 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
}
// LUCENE-3831
@SuppressWarnings("resource")
public void testNullPointerException() throws IOException {
RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
SpanQuery wrappedquery = new SpanMultiTermQueryWrapper<>(regex);
@@ -392,7 +390,6 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
}
// LUCENE-3831
@SuppressWarnings("resource")
public void testPassesIfWrapped() throws IOException {
RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
SpanQuery wrappedquery = new SpanOrQuery(new SpanMultiTermQueryWrapper<>(regex));

View File

@@ -83,7 +83,6 @@ public final class UpToTwoPositiveIntOutputs extends Outputs<Object> {
}
}
@SuppressWarnings("deprecation")
private final static Long NO_OUTPUT = new Long(0);
private final boolean doShare;

View File

@@ -23,7 +23,6 @@ import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.BaseSimilarityTestCase;
import org.apache.lucene.search.similarities.Similarity;
@SuppressWarnings("deprecation")
public class TestLegacyBM25Similarity extends BaseSimilarityTestCase {
public void testIllegalK1() {

View File

@@ -42,7 +42,6 @@ import org.apache.lucene.search.SortField;
*
*
*/
@SuppressWarnings({"unchecked", "rawtypes", "deprecation"})
public abstract class ValueSource {
/**

View File

@@ -144,7 +144,6 @@ public class DocFreqValueSource extends ValueSource {
return name() + '(' + field + ',' + val + ')';
}
@SuppressWarnings("rawtypes")
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
IndexSearcher searcher = (IndexSearcher)context.get("searcher");
@@ -152,7 +151,6 @@ public class DocFreqValueSource extends ValueSource {
return new ConstIntDocValues(docfreq, this);
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
context.put("searcher",searcher);

View File

@@ -40,13 +40,11 @@ public class MaxDocValueSource extends ValueSource {
return name() + "()";
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
context.put("searcher",searcher);
}
@SuppressWarnings("rawtypes")
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
IndexSearcher searcher = (IndexSearcher)context.get("searcher");

View File

@@ -54,13 +54,11 @@ public class NormValueSource extends ValueSource {
return name() + '(' + field + ')';
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
context.put("searcher",searcher);
}
@SuppressWarnings("rawtypes")
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
IndexSearcher searcher = (IndexSearcher)context.get("searcher");

View File

@@ -36,7 +36,6 @@ import org.apache.lucene.util.mutable.MutableValueFloat;
/**
* <code>QueryValueSource</code> returns the relevance score of the query
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public class QueryValueSource extends ValueSource {
final Query q;
final float defVal;

View File

@@ -59,7 +59,6 @@ public class ScaleFloatFunction extends ValueSource {
float maxVal;
}
@SuppressWarnings({"unchecked", "rawtypes"})
private ScaleInfo createScaleInfo(Map context, LeafReaderContext readerContext) throws IOException {
final List<LeafReaderContext> leaves = ReaderUtil.getTopLevelContext(readerContext).leaves();
@@ -100,7 +99,6 @@ public class ScaleFloatFunction extends ValueSource {
return scaleInfo;
}
@SuppressWarnings("rawtypes")
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
@@ -134,7 +132,6 @@ public class ScaleFloatFunction extends ValueSource {
};
}
@SuppressWarnings("rawtypes")
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
source.createWeight(context, searcher);

View File

@@ -47,13 +47,11 @@ public class SumTotalTermFreqValueSource extends ValueSource {
return name() + '(' + indexedField + ')';
}
@SuppressWarnings("rawtypes")
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
return (FunctionValues)context.get(this);
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
long sumTotalTermFreq = 0;

View File

@@ -54,13 +54,11 @@ public class TotalTermFreqValueSource extends ValueSource {
return name() + '(' + field + ',' + val + ')';
}
@SuppressWarnings("rawtypes")
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
return (FunctionValues)context.get(this);
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
long totalTermFreq = 0;

View File

@@ -210,7 +210,6 @@ public class TestPayloadCheckQuery extends LuceneTestCase {
assertFalse(query3.equals(query4));
}
@SuppressWarnings({"unchecked", "rawtypes"})
public void testRewrite() throws IOException {
SpanMultiTermQueryWrapper fiv = new SpanMultiTermQueryWrapper(new WildcardQuery(new Term("field", "fiv*")));
SpanMultiTermQueryWrapper hund = new SpanMultiTermQueryWrapper(new WildcardQuery(new Term("field", "hund*")));

View File

@@ -109,12 +109,7 @@ public final class FastCharStream implements CharStream {
public final int getColumn() {
return bufferStart + bufferPosition;
}
/**
* @deprecated use getEndLine
* @see #getEndLine
*/
@Override
@Deprecated
public final int getLine() {
return 1;
}

View File

@@ -65,7 +65,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
protected String field;
int phraseSlop = 0;
@SuppressWarnings("deprecation")
float fuzzyMinSim = FuzzyQuery.defaultMinSimilarity;
int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;
Locale locale = Locale.getDefault();
@@ -589,7 +588,6 @@ public abstract class QueryParserBase extends QueryBuilder implements CommonQuer
* @param prefixLength prefix length
* @return new FuzzyQuery Instance
*/
@SuppressWarnings("deprecation")
protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
// FuzzyQuery doesn't yet allow constant score rewrite
String text = term.text();

View File

@@ -36,7 +36,6 @@ public class FuzzyQueryNodeBuilder implements StandardQueryBuilder {
FuzzyQueryNode fuzzyNode = (FuzzyQueryNode) queryNode;
String text = fuzzyNode.getTextAsString();
@SuppressWarnings("deprecation")
int numEdits = FuzzyQuery.floatToEdits(fuzzyNode.getSimilarity(),
text.codePointCount(0, text.length()));

View File

@@ -25,7 +25,6 @@ public class FuzzyConfig {
private int prefixLength = FuzzyQuery.defaultPrefixLength;
@SuppressWarnings("deprecation")
private float minSimilarity = FuzzyQuery.defaultMinSimilarity;
public FuzzyConfig() {}

View File

@@ -106,21 +106,10 @@ public final class FastCharStream implements CharStream {
}
@Override
@Deprecated
/**
* Returns the column position of the character last read.
* @deprecated
* @see #getEndColumn
*/
public final int getColumn() {
return bufferStart + bufferPosition;
}
@Override
@Deprecated
/**
* @deprecated
* @see #getEndLine
*/
public final int getLine() {
return 1;
}

View File

@@ -105,21 +105,10 @@ public final class FastCharStream implements CharStream {
}
@Override
@Deprecated
/**
* @deprecated
* @see #getEndColumn
*/
public final int getColumn() {
return bufferStart + bufferPosition;
}
@Override
@Deprecated
/**
* Returns the line number of the character last read.
* @deprecated
* @see #getEndLine
*/
public final int getLine() {
return 1;
}

View File

@@ -33,7 +33,6 @@ import org.w3c.dom.NodeList;
public class FuzzyLikeThisQueryBuilder implements QueryBuilder {
private static final int DEFAULT_MAX_NUM_TERMS = 50;
@SuppressWarnings("deprecation")
private static final float DEFAULT_MIN_SIMILARITY = FuzzyQuery.defaultMinSimilarity;
private static final int DEFAULT_PREFIX_LENGTH = 1;
private static final boolean DEFAULT_IGNORE_TF = false;

View File

@@ -27,7 +27,6 @@ import org.apache.lucene.queryparser.flexible.core.util.UnescapedCharSequence;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@SuppressWarnings("deprecation")
public class TestQueryTreeBuilder extends LuceneTestCase {
@Test

View File

@@ -51,7 +51,6 @@ public abstract class ReplicatorTestCase extends LuceneTestCase {
* Returns a new {@link Server HTTP Server} instance. To obtain its port, use
* {@link #serverPort(Server)}.
*/
@SuppressWarnings("deprecation")
public static synchronized Server newHttpServer(Handler handler) throws Exception {
// if this property is true, then jetty will be configured to use SSL
// leveraging the same system properties as java to specify

View File

@@ -114,7 +114,6 @@ public class SimpleWKTShapeParser {
}
/** Parses a list of points into latitude and longitude arraylists */
@SuppressWarnings("rawtypes")
private static void parseCoordinates(StreamTokenizer stream, ArrayList lats, ArrayList lons)
throws IOException, ParseException {
boolean isOpenParen = false;
@@ -138,7 +137,6 @@ public class SimpleWKTShapeParser {
}
/** parses a single coordinate, w/ optional 3rd dimension */
@SuppressWarnings({"unchecked", "rawtypes"})
private static void parseCoordinate(StreamTokenizer stream, ArrayList lats, ArrayList lons)
throws IOException, ParseException {
lons.add(nextNumber(stream));
@@ -154,8 +152,8 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
ArrayList<Double> lats = new ArrayList<>();
ArrayList<Double> lons = new ArrayList<>();
ArrayList<Double> lats = new ArrayList();
ArrayList<Double> lons = new ArrayList();
parseCoordinates(stream, lats, lons);
double[][] result = new double[lats.size()][2];
for (int i = 0; i < lats.size(); ++i) {
@@ -170,8 +168,8 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
ArrayList<Double> lats = new ArrayList<>();
ArrayList<Double> lons = new ArrayList<>();
ArrayList<Double> lats = new ArrayList();
ArrayList<Double> lons = new ArrayList();
parseCoordinates(stream, lats, lons);
return new Line(lats.stream().mapToDouble(i->i).toArray(), lons.stream().mapToDouble(i->i).toArray());
}
@@ -182,7 +180,7 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
ArrayList<Line> lines = new ArrayList<>();
ArrayList<Line> lines = new ArrayList();
lines.add(parseLine(stream));
while (nextCloserOrComma(stream).equals(COMMA)) {
lines.add(parseLine(stream));
@@ -191,7 +189,6 @@ public class SimpleWKTShapeParser {
}
/** parses the hole of a polygon */
@SuppressWarnings({"rawtypes", "unchecked"})
private static Polygon parsePolygonHole(StreamTokenizer stream) throws IOException, ParseException {
ArrayList<Double> lats = new ArrayList();
ArrayList<Double> lons = new ArrayList();
@@ -200,7 +197,6 @@ public class SimpleWKTShapeParser {
}
/** parses a POLYGON */
@SuppressWarnings({"rawtypes", "unchecked"})
private static Polygon parsePolygon(StreamTokenizer stream) throws IOException, ParseException {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return null;
@@ -226,7 +222,7 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
ArrayList<Polygon> polygons = new ArrayList<>();
ArrayList<Polygon> polygons = new ArrayList();
polygons.add(parsePolygon(stream));
while (nextCloserOrComma(stream).equals(COMMA)) {
polygons.add(parsePolygon(stream));
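What the revert restores throughout SimpleWKTShapeParser is raw ArrayList construction, which is exactly what the reinstated rawtypes/unchecked suppressions cover; the diamond form being removed is the warning-free spelling. A minimal illustration of what the compiler sees in each case (names illustrative):

import java.util.ArrayList;

class RawTypeSketch {
  @SuppressWarnings({"rawtypes", "unchecked"})
  static ArrayList<Double> coords() {
    ArrayList<Double> inferred = new ArrayList<>(); // diamond: fully typed, no warning
    ArrayList<Double> raw = new ArrayList();        // raw type: unchecked conversion warning
    raw.add(0.0);
    raw.addAll(inferred);
    return raw;
  }
}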

View File

@@ -133,7 +133,6 @@ public abstract class SpatialStrategy {
* scores will be 1 for indexed points at the center of the query shape and as
* low as ~0.1 at its furthest edges.
*/
@SuppressWarnings("deprecation")
public final DoubleValuesSource makeRecipDistanceValueSource(Shape queryShape) {
Rectangle bbox = queryShape.getBoundingBox();
double diagonalDist = ctx.getDistCalc().distance(

View File

@@ -45,7 +45,6 @@ class BBoxValueSource extends ShapeValuesSource {
return "bboxShape(" + strategy.getFieldName() + ")";
}
@SuppressWarnings("deprecation")
@Override
public ShapeValues getValues(LeafReaderContext readerContext) throws IOException {
LeafReader reader = readerContext.reader();

View File

@@ -88,7 +88,6 @@ public class HeatmapFacetCounter {
* @param facetLevel the target depth (detail) of cells.
* @param maxCells the maximum number of cells to return. If the cells exceed this count, an
*/
@SuppressWarnings("deprecation")
public static Heatmap calcFacets(PrefixTreeStrategy strategy, IndexReaderContext context, Bits topAcceptDocs,
Shape inputShape, final int facetLevel, int maxCells) throws IOException {
if (maxCells > (MAX_ROWS_OR_COLUMNS * MAX_ROWS_OR_COLUMNS)) {

View File

@@ -102,7 +102,6 @@ public class WithinPrefixTreeQuery extends AbstractVisitingPrefixTreeQuery {
/** Returns a new shape that is larger than shape by at distErr.
*/
//TODO move this generic code elsewhere? Spatial4j?
@SuppressWarnings("deprecation")
protected Shape bufferShape(Shape shape, double distErr) {
if (distErr <= 0)
throw new IllegalArgumentException("distErr must be > 0");

View File

@@ -84,7 +84,6 @@ public class PackedQuadPrefixTree extends QuadPrefixTree {
return new PackedQuadCell(0x0L);
}
@SuppressWarnings("deprecation")
@Override
public Cell getCell(Point p, int level) {
if (!robust) { // old method
@@ -145,7 +144,6 @@ public class PackedQuadPrefixTree extends QuadPrefixTree {
checkBattenbergNotRobustly(QUAD[3], x + w, y - h, level, matches, term, shape, maxLevel);
}
@SuppressWarnings("deprecation")
protected void checkBattenbergNotRobustly(byte quad, double cx, double cy, int level, List<Cell> matches,
long term, Shape shape, int maxLevel) {
// short-circuit if we find a match for the point (no need to continue recursion)

View File

@@ -138,7 +138,6 @@ public class QuadPrefixTree extends LegacyPrefixTree {
return maxLevels;
}
@SuppressWarnings("deprecation")
@Override
public Cell getCell(Point p, int level) {
if (!robust) { // old method
@@ -209,7 +208,6 @@ public class QuadPrefixTree extends LegacyPrefixTree {
// if we actually use the range property in the query, this could be useful
}
@SuppressWarnings("deprecation")
protected void checkBattenbergNotRobustly(
char c,
double cx,
@@ -323,7 +321,6 @@ public class QuadPrefixTree extends LegacyPrefixTree {
return shape;
}
@SuppressWarnings("deprecation")
protected Rectangle makeShape() {
BytesRef token = getTokenBytesNoLeaf(null);
double xmin = QuadPrefixTree.this.xmin;

View File

@@ -112,7 +112,6 @@ public class SpatialArgsParser {
args.setDistErr(readDouble(nameValPairs.remove(DIST_ERR)));
}
@SuppressWarnings("deprecation")
protected Shape parseShape(String str, SpatialContext ctx) throws ParseException {
//return ctx.readShape(str);//still in Spatial4j 0.4 but will be deleted
return ctx.readShapeFromWkt(str);

View File

@@ -66,6 +66,7 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
private static final double DEFAULT_CIRCLE_ACCURACY = 1e-4;
private double circleAccuracy = DEFAULT_CIRCLE_ACCURACY;
@SuppressWarnings("unchecked")
public Geo3dShapeFactory(SpatialContext context, SpatialContextFactory factory) {
this.context = context;
this.planetModel = ((Geo3dSpatialContextFactory) factory).planetModel;
@@ -196,7 +197,6 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
}
@Override
@Deprecated // use a builder
public Shape lineString(List<Point> list, double distance) {
LineStringBuilder builder = lineString();
for (Point point : list) {
@@ -207,7 +207,6 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
}
@Override
@Deprecated // use a builder
public <S extends Shape> ShapeCollection<S> multiShape(List<S> list) {
throw new UnsupportedOperationException();
}
@@ -327,6 +326,7 @@ public class Geo3dShapeFactory implements S2ShapeFactory {
}
}
@SuppressWarnings("unchecked")
@Override
public Shape build() {
GeoPolygonFactory.PolygonDescription description = new GeoPolygonFactory.PolygonDescription(points, polyHoles);

View File

@@ -36,7 +36,6 @@ import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Shape;
@SuppressWarnings("deprecation")
public class DistanceStrategyTest extends StrategyTestCase {
@ParametersFactory(argumentFormatting = "strategy=%s")
public static Iterable<Object[]> parameters() {

View File

@@ -40,7 +40,6 @@ import org.locationtech.spatial4j.shape.Shape;
/**
* Based off of Solr 3's SpatialFilterTest.
*/
@SuppressWarnings("deprecation")
public class PortedSolr3Test extends StrategyTestCase {
@ParametersFactory(argumentFormatting = "strategy=%s")

View File

@@ -102,13 +102,11 @@ public class QueryEqualsHashCodeTest extends LuceneTestCase {
assertTrue(first.hashCode() != second.hashCode());
}
@SuppressWarnings("deprecation")
private SpatialArgs makeArgs1() {
final Shape shape1 = ctx.makeRectangle(0, 0, 10, 10);
return new SpatialArgs(predicate, shape1);
}
@SuppressWarnings("deprecation")
private SpatialArgs makeArgs2() {
final Shape shape2 = ctx.makeRectangle(0, 0, 20, 20);
return new SpatialArgs(predicate, shape2);

View File

@@ -23,7 +23,6 @@ import org.junit.Test;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Shape;
@SuppressWarnings("deprecation")
public class SpatialArgsTest extends LuceneTestCase {
@Test

View File

@@ -53,7 +53,6 @@ import org.locationtech.spatial4j.shape.Shape;
* This class serves as example code to show how to use the Lucene spatial
* module.
*/
@SuppressWarnings("deprecation")
public class SpatialExample extends LuceneTestCase {
//Note: Test invoked via TestTestFramework.spatialExample()

View File

@@ -119,7 +119,6 @@ public abstract class SpatialTestCase extends LuceneTestCase {
}
}
@SuppressWarnings("deprecation")
protected Point randomPoint() {
final Rectangle WB = ctx.getWorldBounds();
return ctx.makePoint(
@@ -131,7 +130,6 @@ public abstract class SpatialTestCase extends LuceneTestCase {
return randomRectangle(ctx.getWorldBounds());
}
@SuppressWarnings("deprecation")
protected Rectangle randomRectangle(Rectangle bounds) {
double[] xNewStartAndWidth = randomSubRange(bounds.getMinX(), bounds.getWidth());
double xMin = xNewStartAndWidth[0];
@@ -149,7 +147,6 @@ public abstract class SpatialTestCase extends LuceneTestCase {
}
/** Returns new minStart and new length that is inside the range specified by the arguments. */
@SuppressWarnings("deprecation")
protected double[] randomSubRange(double boundStart, double boundLen) {
if (boundLen >= 3 && usually()) { // typical
// prefer integers for ease of debugability ... and prefer 1/16th of bound

View File

@@ -41,7 +41,6 @@ public class SpatialTestData {
* an "id", a "name" and the "shape". Empty lines and lines starting with a '#' are skipped.
* The stream is closed.
*/
@SuppressWarnings("deprecation")
public static Iterator<SpatialTestData> getTestData(InputStream in, SpatialContext ctx) throws IOException {
List<SpatialTestData> results = new ArrayList<>();
BufferedReader bufInput = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));

View File

@@ -184,7 +184,6 @@ public abstract class StrategyTestCase extends SpatialTestCase {
return strategy.makeQuery(q.args);
}
@SuppressWarnings("deprecation")
protected void adoc(String id, String shapeStr) throws IOException, ParseException {
Shape shape = shapeStr==null ? null : ctx.readShapeFromWkt(shapeStr);
addDocument(newDoc(id, shape));

View File

@@ -36,7 +36,6 @@ import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.impl.RectangleImpl;
@SuppressWarnings("deprecation")
public class TestBBoxStrategy extends RandomSpatialOpStrategyTestCase {
@Override

View File

@@ -116,7 +116,6 @@ public class CompositeStrategyTest extends RandomSpatialOpStrategyTestCase {
}
//TODO move up
@SuppressWarnings("deprecation")
private Shape randomCircle() {
final Point point = randomPoint();
//TODO pick using gaussian

View File

@@ -38,7 +38,6 @@ public class DateNRStrategyTest extends RandomSpatialOpStrategyTestCase {
long randomCalWindowMs;
@SuppressWarnings("deprecation")
@Before
public void setUp() throws Exception {
super.setUp();

View File

@@ -43,7 +43,6 @@ import org.locationtech.spatial4j.shape.impl.RectangleImpl;
import static com.carrotsearch.randomizedtesting.RandomizedTest.atMost;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
@SuppressWarnings("deprecation")
public class HeatmapFacetCounterTest extends StrategyTestCase {
SpatialPrefixTree grid;

View File

@@ -37,7 +37,6 @@ import org.junit.Test;
import java.text.ParseException;
import java.util.HashMap;
@SuppressWarnings("deprecation")
public class JtsPolygonTest extends StrategyTestCase {
private static final double LUCENE_4464_distErrPct = SpatialArgs.DEFAULT_DISTERRPCT;//DEFAULT 2.5%

View File

@ -45,7 +45,6 @@ import org.locationtech.spatial4j.shape.Shape;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomInt;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
@SuppressWarnings("deprecation")
public class NumberRangeFacetsTest extends StrategyTestCase {
DateRangePrefixTree tree;

View File

@ -63,7 +63,6 @@ import static org.locationtech.spatial4j.shape.SpatialRelation.WITHIN;
/** Randomized PrefixTree test that considers the fuzziness of the
* results introduced by grid approximation. */
@SuppressWarnings("deprecation")
public class RandomSpatialOpFuzzyPrefixTreeTest extends StrategyTestCase {
static final int ITERATIONS = 10;

View File

@ -34,7 +34,6 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween
/** Base test harness, ideally for SpatialStrategy impls that have exact results
* (not grid approximated), hence "not fuzzy".
*/
@SuppressWarnings("deprecation")
public abstract class RandomSpatialOpStrategyTestCase extends StrategyTestCase {
//Note: this is partially redundant with StrategyTestCase.runTestQuery & testOperation

View File

@ -31,7 +31,6 @@ import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
@SuppressWarnings("deprecation")
public class TestRecursivePrefixTreeStrategy extends StrategyTestCase {
private int maxLength;

View File

@ -30,9 +30,9 @@ import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
public class TestTermQueryPrefixGridStrategy extends SpatialTestCase {
@SuppressWarnings("deprecation")
@Test
public void testNGramPrefixGridLosAngeles() throws IOException {
SpatialContext ctx = SpatialContext.GEO;

View File

@ -79,7 +79,6 @@ public class SpatialPrefixTreeTest extends SpatialTestCase {
* A PrefixTree pruning optimization gone bad, applicable when optimize=true.
* See <a href="https://issues.apache.org/jira/browse/LUCENE-4770">LUCENE-4770</a>.
*/
@SuppressWarnings("deprecation")
@Test
public void testBadPrefixTreePrune() throws Exception {

View File

@ -46,7 +46,6 @@ import org.locationtech.spatial4j.shape.Shape;
import static org.locationtech.spatial4j.distance.DistanceUtils.DEGREES_TO_RADIANS;
@SuppressWarnings("deprecation")
public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {
private PlanetModel planetModel;

View File

@ -66,7 +66,6 @@ public class Geo3dShapeSphereModelRectRelationTest extends ShapeRectRelationTest
}
@SuppressWarnings({"unchecked", "rawtypes", "deprecation"})
@Test
public void testFailure2_LUCENE6475() {
GeoCircle geo3dCircle = GeoCircleFactory.makeGeoCircle(planetModel, 1.6282053147165243E-4 * RADIANS_PER_DEGREE,

View File

@ -36,7 +36,6 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.*;
* A base test class with utility methods to help test shapes.
* Extends from RandomizedTest.
*/
@SuppressWarnings("deprecation")
public abstract class RandomizedShapeTestCase extends LuceneTestCase {
protected static final double EPS = 10e-9;
@ -56,6 +55,7 @@ public abstract class RandomizedShapeTestCase extends LuceneTestCase {
this.ctx = ctx;
}
@SuppressWarnings("unchecked")
public static void checkShapesImplementEquals( Class<?>[] classes ) {
for( Class<?> clazz : classes ) {
try {

View File

@ -33,7 +33,6 @@ import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Circle;
import org.locationtech.spatial4j.shape.Point;
@SuppressWarnings("deprecation")
public class TestPointVectorStrategy extends StrategyTestCase {
@Before

View File

@ -23,6 +23,7 @@ import java.util.Set;
import org.apache.lucene.search.suggest.InputIterator;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.Accountable;
@ -38,7 +39,6 @@ import org.apache.lucene.util.CharsRefBuilder;
* @deprecated Migrate to one of the newer suggesters which are much more RAM efficient.
*/
@Deprecated
@SuppressWarnings("deprecation")
public class JaspellLookup extends Lookup implements Accountable {
JaspellTernarySearchTrie trie = new JaspellTernarySearchTrie();
private boolean usePrefix = true;
@ -140,52 +140,52 @@ public class JaspellLookup extends Lookup implements Accountable {
private static final byte HI_KID = 0x04;
private static final byte HAS_VALUE = 0x08;
private void readRecursively(DataInput in, org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode node) throws IOException {
private void readRecursively(DataInput in, TSTNode node) throws IOException {
node.splitchar = in.readString().charAt(0);
byte mask = in.readByte();
if ((mask & HAS_VALUE) != 0) {
node.data = Long.valueOf(in.readLong());
}
if ((mask & LO_KID) != 0) {
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode kid = new org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode('\0', node);
node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.LOKID] = kid;
TSTNode kid = new TSTNode('\0', node);
node.relatives[TSTNode.LOKID] = kid;
readRecursively(in, kid);
}
if ((mask & EQ_KID) != 0) {
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode kid = new org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode('\0', node);
node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.EQKID] = kid;
TSTNode kid = new TSTNode('\0', node);
node.relatives[TSTNode.EQKID] = kid;
readRecursively(in, kid);
}
if ((mask & HI_KID) != 0) {
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode kid = new org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode('\0', node);
node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.HIKID] = kid;
TSTNode kid = new TSTNode('\0', node);
node.relatives[TSTNode.HIKID] = kid;
readRecursively(in, kid);
}
}
private void writeRecursively(DataOutput out, org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode node) throws IOException {
private void writeRecursively(DataOutput out, TSTNode node) throws IOException {
if (node == null) {
return;
}
out.writeString(new String(new char[] {node.splitchar}, 0, 1));
byte mask = 0;
if (node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.LOKID] != null) mask |= LO_KID;
if (node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.EQKID] != null) mask |= EQ_KID;
if (node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.HIKID] != null) mask |= HI_KID;
if (node.relatives[TSTNode.LOKID] != null) mask |= LO_KID;
if (node.relatives[TSTNode.EQKID] != null) mask |= EQ_KID;
if (node.relatives[TSTNode.HIKID] != null) mask |= HI_KID;
if (node.data != null) mask |= HAS_VALUE;
out.writeByte(mask);
if (node.data != null) {
out.writeLong(((Number)node.data).longValue());
}
writeRecursively(out, node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.LOKID]);
writeRecursively(out, node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.EQKID]);
writeRecursively(out, node.relatives[org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode.HIKID]);
writeRecursively(out, node.relatives[TSTNode.LOKID]);
writeRecursively(out, node.relatives[TSTNode.EQKID]);
writeRecursively(out, node.relatives[TSTNode.HIKID]);
}
@Override
public boolean store(DataOutput output) throws IOException {
output.writeVLong(count);
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode root = trie.getRoot();
TSTNode root = trie.getRoot();
if (root == null) { // empty tree
return false;
}
@ -196,7 +196,7 @@ public class JaspellLookup extends Lookup implements Accountable {
@Override
public boolean load(DataInput input) throws IOException {
count = input.readVLong();
org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode root = new org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode('\0', null);
TSTNode root = new TSTNode('\0', null);
readRecursively(input, root);
trie.setRoot(root);
return true;
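The serialization above is a straightforward pre-order walk: writeRecursively emits each node's split character, then a one-byte mask, then an optional long value, then the children whose bits are set, always in LOKID, EQKID, HIKID order; readRecursively consumes the stream in exactly the same order, so the mask alone is enough to know which children follow. With HI_KID = 0x04 and HAS_VALUE = 0x08 as above (LO_KID and EQ_KID, truncated out of this hunk, are presumably 0x01 and 0x02):

// Hypothetical mask for a node with a low child and a stored value:
byte mask = (byte) (0x01 | 0x08); // LO_KID | HAS_VALUE == 0x09
// On load, readRecursively sees bit 0x08 and reads a long into node.data,
// then sees bit 0x01, allocates one TSTNode and recurses into it; EQ/HI are skipped.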

View File

@ -38,7 +38,6 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@SuppressWarnings("deprecation")
public class TestWordBreakSpellChecker extends LuceneTestCase {
private Directory dir;
private Analyzer analyzer;

View File

@ -40,6 +40,7 @@ import org.apache.lucene.search.suggest.analyzing.FreeTextSuggester;
import org.apache.lucene.search.suggest.analyzing.FuzzySuggester;
import org.apache.lucene.search.suggest.fst.FSTCompletionLookup;
import org.apache.lucene.search.suggest.fst.WFSTCompletionLookup;
import org.apache.lucene.search.suggest.jaspell.JaspellLookup;
import org.apache.lucene.search.suggest.tst.TSTLookup;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
@ -52,12 +53,12 @@ import org.junit.Ignore;
*/
@Ignore("COMMENT ME TO RUN BENCHMARKS!")
public class LookupBenchmarkTest extends LuceneTestCase {
@SuppressWarnings("deprecation")
@SuppressWarnings("unchecked")
private final List<Class<? extends Lookup>> benchmarkClasses = Arrays.asList(
FuzzySuggester.class,
AnalyzingSuggester.class,
AnalyzingInfixSuggester.class,
org.apache.lucene.search.suggest.jaspell.JaspellLookup.class,
JaspellLookup.class,
TSTLookup.class,
FSTCompletionLookup.class,
WFSTCompletionLookup.class,
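The annotation on this list changes from "deprecation" back to "unchecked", which matches the unchecked generic array creation that varargs Arrays.asList performs for a non-reifiable element type. A minimal reproduction (imports as in this file):

// Varargs over Class<? extends Lookup> creates a generic array, hence an unchecked warning:
List<Class<? extends Lookup>> two = Arrays.asList(JaspellLookup.class, TSTLookup.class);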

View File

@ -51,7 +51,6 @@ public class PersistenceTest extends LuceneTestCase {
runTest(TSTLookup.class, true);
}
@SuppressWarnings("deprecation")
public void testJaspellPersistence() throws Exception {
runTest(JaspellLookup.class, true);
}

View File

@ -20,7 +20,7 @@ import java.io.IOException;
import java.util.List;
import java.util.Random;
import org.junit.Assert;
import junit.framework.Assert;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
@ -40,9 +40,9 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Version;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;
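junit.framework.Assert is the JUnit 3 assertion API, long deprecated in favor of org.junit.Assert; this hunk restores the older imports. For the basic assertions used here the two are call-compatible, e.g.:

// Same basic signatures in both APIs (the JUnit 3 variant is deprecated):
junit.framework.Assert.assertEquals("count", 1, 1); // JUnit 3
org.junit.Assert.assertEquals("count", 1, 1);       // JUnit 4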
/**
* Utility class for sanity-checking queries.

Some files were not shown because too many files have changed in this diff.