LUCENE-9172: nuke some compiler warnings

Robert Muir 2020-01-27 06:08:30 -05:00
parent 5f964eeef2
commit fddb5314fc
15 changed files with 54 additions and 46 deletions


@@ -20,7 +20,6 @@
 * <p>
 * Structure similar to {@link org.apache.lucene.codecs.blockterms.VariableGapTermsIndexWriter}
 * with additional optimizations.
- * <p>
 * <ul>
 * <li>Designed to be extensible</li>
 * <li>Reduced on-heap memory usage.</li>


@@ -21,7 +21,6 @@
 * Extension of {@link org.apache.lucene.codecs.uniformsplit} with Shared Terms principle:
 * Terms are shared between all fields. It is particularly adapted to index a massive number of fields
 * because all the terms are stored in a single FST dictionary.
- * <p>
 * <ul>
 * <li>Designed to be extensible</li>
 * <li>Highly reduced on-heap memory usage when dealing with a massive number of fields.</li>


@@ -35,7 +35,6 @@ import org.apache.lucene.util.NumericUtils;
 * <p>
 * This class defines the static methods for encoding the three vertices of a tessellated triangles as a seven dimension point.
 * The coordinates are converted from double precision values into 32 bit integers so they are sortable at index time.
- * <p>
 */
public final class ShapeField {
  /** vertex coordinates are encoded as 4 byte integers */

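Note: the three hunks above are all the same fix. An empty <p> placed directly before block content such as <ul>, or dangling at the end of a doc comment, is flagged by javadoc's HTML checks, so the stray tag is simply deleted. A hypothetical before/after sketch, not taken from the commit:

    // Before: the empty <p> in front of the <ul> draws a javadoc warning,
    // because <p> should introduce paragraph text, not a block element.
    /**
     * Utilities for frobnicating widgets.
     * <p>
     * <ul>
     *   <li>stateless</li>
     * </ul>
     */
    final class WidgetDocsBefore {}

    // After: drop the stray <p>; the list opens its own block.
    /**
     * Utilities for frobnicating widgets.
     * <ul>
     *   <li>stateless</li>
     * </ul>
     */
    final class WidgetDocsAfter {}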

@@ -114,7 +114,7 @@ public class SimpleWKTShapeParser {
}
/** Parses a list of points into latitude and longitude arraylists */
- private static void parseCoordinates(StreamTokenizer stream, ArrayList lats, ArrayList lons)
+ private static void parseCoordinates(StreamTokenizer stream, ArrayList<Double> lats, ArrayList<Double> lons)
throws IOException, ParseException {
boolean isOpenParen = false;
if (isNumberNext(stream) || (isOpenParen = nextWord(stream).equals(LPAREN))) {
@@ -137,7 +137,7 @@ public class SimpleWKTShapeParser {
}
/** parses a single coordinate, w/ optional 3rd dimension */
- private static void parseCoordinate(StreamTokenizer stream, ArrayList lats, ArrayList lons)
+ private static void parseCoordinate(StreamTokenizer stream, ArrayList<Double> lats, ArrayList<Double> lons)
throws IOException, ParseException {
lons.add(nextNumber(stream));
lats.add(nextNumber(stream));
@@ -152,8 +152,8 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
- ArrayList<Double> lats = new ArrayList();
- ArrayList<Double> lons = new ArrayList();
+ ArrayList<Double> lats = new ArrayList<>();
+ ArrayList<Double> lons = new ArrayList<>();
parseCoordinates(stream, lats, lons);
double[][] result = new double[lats.size()][2];
for (int i = 0; i < lats.size(); ++i) {
@@ -168,8 +168,8 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
- ArrayList<Double> lats = new ArrayList();
- ArrayList<Double> lons = new ArrayList();
+ ArrayList<Double> lats = new ArrayList<>();
+ ArrayList<Double> lons = new ArrayList<>();
parseCoordinates(stream, lats, lons);
return new Line(lats.stream().mapToDouble(i->i).toArray(), lons.stream().mapToDouble(i->i).toArray());
}
@@ -180,7 +180,7 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
- ArrayList<Line> lines = new ArrayList();
+ ArrayList<Line> lines = new ArrayList<>();
lines.add(parseLine(stream));
while (nextCloserOrComma(stream).equals(COMMA)) {
lines.add(parseLine(stream));
@@ -190,8 +190,8 @@ public class SimpleWKTShapeParser {
/** parses the hole of a polygon */
private static Polygon parsePolygonHole(StreamTokenizer stream) throws IOException, ParseException {
- ArrayList<Double> lats = new ArrayList();
- ArrayList<Double> lons = new ArrayList();
+ ArrayList<Double> lats = new ArrayList<>();
+ ArrayList<Double> lons = new ArrayList<>();
parseCoordinates(stream, lats, lons);
return new Polygon(lats.stream().mapToDouble(i->i).toArray(), lons.stream().mapToDouble(i->i).toArray());
}
@@ -202,8 +202,8 @@ public class SimpleWKTShapeParser {
return null;
}
nextOpener(stream);
- ArrayList<Double> lats = new ArrayList();
- ArrayList<Double> lons = new ArrayList();
+ ArrayList<Double> lats = new ArrayList<>();
+ ArrayList<Double> lons = new ArrayList<>();
parseCoordinates(stream, lats, lons);
ArrayList<Polygon> holes = new ArrayList<>();
while (nextCloserOrComma(stream).equals(COMMA)) {
@@ -222,7 +222,7 @@ public class SimpleWKTShapeParser {
if (token.equals(EMPTY)) {
return null;
}
- ArrayList<Polygon> polygons = new ArrayList();
+ ArrayList<Polygon> polygons = new ArrayList<>();
polygons.add(parsePolygon(stream));
while (nextCloserOrComma(stream).equals(COMMA)) {
polygons.add(parsePolygon(stream));

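Note: every SimpleWKTShapeParser change is one of two related fixes for javac's rawtypes/unchecked warnings: type the ArrayList parameters, and use the diamond operator at the construction sites. A minimal hypothetical sketch of both, not from the commit:

    import java.util.ArrayList;

    class RawTypeDemo {
      // Before (warns): a raw ArrayList parameter makes every add() an
      // unchecked call, and assigning 'new ArrayList()' to a parameterized
      // variable is an unchecked conversion:
      //   static void fill(ArrayList vals) { vals.add(1.0); }
      //   ArrayList<Double> lats = new ArrayList();

      // After (clean): type the parameter, let the diamond infer the
      // element type at the construction site.
      static void fill(ArrayList<Double> vals) {
        vals.add(1.0);
      }

      public static void main(String[] args) {
        ArrayList<Double> lats = new ArrayList<>();
        fill(lats);
        System.out.println(lats);
      }
    }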

@@ -50,6 +50,7 @@ public class TestAnalyzerWrapper extends LuceneTestCase {
};
try (TokenStream ts = wrapped.tokenStream("", "text")) {
+ assert ts != null;
assertTrue(sourceCalled.get());
}

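Note: the added assert ts != null; is the idiom this commit leans on whenever a try-with-resources variable exists only for its close() side effect. javac's -Xlint:try otherwise warns that the auto-closeable resource is never referenced in the body of the try statement; a single assert references it, at no runtime cost unless assertions are enabled with -ea. The same pattern reappears below in TestIndexWriter, TestFileSwitchDirectory, and BaseDirectoryTestCase. A hypothetical sketch:

    import java.io.Closeable;
    import java.io.IOException;

    class TryResourceDemo {
      static Closeable acquire() {
        return () -> System.out.println("closed");
      }

      public static void main(String[] args) throws IOException {
        // Before (warns under -Xlint:try): the resource exists only to be
        // closed, so it is never referenced inside the block:
        //   try (Closeable res = acquire()) {}

        // After (clean): one assert references the variable.
        try (Closeable res = acquire()) {
          assert res != null;
        }
      }
    }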

@@ -101,7 +101,7 @@ public class TestLatLonPolygonShapeQueries extends BaseLatLonShapeTestCase {
public boolean testComponentQuery(Component2D query, Object o) {
Polygon shape = (Polygon) o;
if (queryRelation == QueryRelation.CONTAINS) {
- return testWithinPolygon(query, (Polygon) shape);
+ return testWithinPolygon(query, shape);
}
List<Tessellator.Triangle> tessellation = Tessellator.tessellate(shape);
for (Tessellator.Triangle t : tessellation) {


@@ -74,7 +74,7 @@ public class TestXYPolygonShapeQueries extends BaseXYShapeTestCase {
public boolean testComponentQuery(Component2D query, Object o) {
XYPolygon shape = (XYPolygon) o;
if (queryRelation == QueryRelation.CONTAINS) {
- return testWithinPolygon(query, (XYPolygon) shape);
+ return testWithinPolygon(query, shape);
}
List<Tessellator.Triangle> tessellation = Tessellator.tessellate(shape);
for (Tessellator.Triangle t : tessellation) {

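Note: both polygon tests cast a variable to its own declared type (shape is already a Polygon or XYPolygon), which javac's -Xlint:cast reports as a redundant cast; removing it changes nothing else. A hypothetical sketch of the pattern:

    class RedundantCastDemo {
      static int length(Object o) {
        String s = (String) o;        // needed: o is declared as Object
        return ((String) s).length(); // redundant: s is already a String
      }
    }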

@@ -149,30 +149,31 @@ public class TestFieldUpdatesBuffer extends LuceneTestCase {
assertFalse(buffer.isNumeric());
}
- public <T extends DocValuesUpdate> T getRandomUpdate(boolean binary) {
+ DocValuesUpdate.BinaryDocValuesUpdate getRandomBinaryUpdate() {
    String termField = RandomPicks.randomFrom(random(), Arrays.asList("id", "_id", "some_other_field"));
    String docId = "" + random().nextInt(10);
-   if (binary) {
-     DocValuesUpdate.BinaryDocValuesUpdate value = new DocValuesUpdate.BinaryDocValuesUpdate(new Term(termField, docId), "binary",
-         rarely() ? null : new BytesRef(TestUtil.randomRealisticUnicodeString(random())));
-     return (T) (rarely() ? value.prepareForApply(random().nextInt(100)) : value);
-   } else {
-     DocValuesUpdate.NumericDocValuesUpdate value = new DocValuesUpdate.NumericDocValuesUpdate(new Term(termField, docId), "numeric",
-         rarely() ? null : Long.valueOf(random().nextInt(100)));
-     return (T) (rarely() ? value.prepareForApply(random().nextInt(100)) : value);
-   }
+   DocValuesUpdate.BinaryDocValuesUpdate value = new DocValuesUpdate.BinaryDocValuesUpdate(new Term(termField, docId), "binary",
+       rarely() ? null : new BytesRef(TestUtil.randomRealisticUnicodeString(random())));
+   return rarely() ? value.prepareForApply(random().nextInt(100)) : value;
+ }
+ DocValuesUpdate.NumericDocValuesUpdate getRandomNumericUpdate() {
+   String termField = RandomPicks.randomFrom(random(), Arrays.asList("id", "_id", "some_other_field"));
+   String docId = "" + random().nextInt(10);
+   DocValuesUpdate.NumericDocValuesUpdate value = new DocValuesUpdate.NumericDocValuesUpdate(new Term(termField, docId), "numeric",
+       rarely() ? null : Long.valueOf(random().nextInt(100)));
+   return rarely() ? value.prepareForApply(random().nextInt(100)) : value;
  }
public void testBinaryRandom() throws IOException {
List<DocValuesUpdate.BinaryDocValuesUpdate> updates = new ArrayList<>();
int numUpdates = 1 + random().nextInt(1000);
Counter counter = Counter.newCounter();
- DocValuesUpdate.BinaryDocValuesUpdate randomUpdate = getRandomUpdate(true);
+ DocValuesUpdate.BinaryDocValuesUpdate randomUpdate = getRandomBinaryUpdate();
updates.add(randomUpdate);
FieldUpdatesBuffer buffer = new FieldUpdatesBuffer(counter, randomUpdate, randomUpdate.docIDUpto);
for (int i = 0; i < numUpdates; i++) {
- randomUpdate = getRandomUpdate(true);
+ randomUpdate = getRandomBinaryUpdate();
updates.add(randomUpdate);
if (randomUpdate.hasValue) {
buffer.addUpdate(randomUpdate.term, randomUpdate.getValue(), randomUpdate.docIDUpto);
@@ -203,11 +204,11 @@ public class TestFieldUpdatesBuffer extends LuceneTestCase {
List<DocValuesUpdate.NumericDocValuesUpdate> updates = new ArrayList<>();
int numUpdates = 1 + random().nextInt(1000);
Counter counter = Counter.newCounter();
- DocValuesUpdate.NumericDocValuesUpdate randomUpdate = getRandomUpdate(false);
+ DocValuesUpdate.NumericDocValuesUpdate randomUpdate = getRandomNumericUpdate();
updates.add(randomUpdate);
FieldUpdatesBuffer buffer = new FieldUpdatesBuffer(counter, randomUpdate, randomUpdate.docIDUpto);
for (int i = 0; i < numUpdates; i++) {
- randomUpdate = getRandomUpdate(false);
+ randomUpdate = getRandomNumericUpdate();
updates.add(randomUpdate);
if (randomUpdate.hasValue) {
buffer.addUpdate(randomUpdate.term, randomUpdate.getValue(), randomUpdate.docIDUpto);

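Note: the old getRandomUpdate(boolean) promised to return whatever T the caller asked for, but could only deliver whichever branch ran, so both returns needed an unverifiable cast to T and every call site compiled with an unchecked warning. Splitting it into two methods with concrete return types removes the casts and the warnings. A hypothetical miniature of the same refactor:

    class SplitGenericDemo {
      static class Update {}
      static class BinaryUpdate extends Update {}
      static class NumericUpdate extends Update {}

      // Before: the cast to T is unchecked, since nothing ties the
      // caller's type argument to the 'binary' flag.
      @SuppressWarnings("unchecked")
      static <T extends Update> T getRandomUpdate(boolean binary) {
        return (T) (binary ? new BinaryUpdate() : new NumericUpdate());
      }

      // After: one method per concrete return type; no casts, no warnings.
      static BinaryUpdate getRandomBinaryUpdate() { return new BinaryUpdate(); }
      static NumericUpdate getRandomNumericUpdate() { return new NumericUpdate(); }
    }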

@@ -2686,6 +2686,7 @@ public class TestIndexWriter extends LuceneTestCase {
List<Closeable> toClose = new ArrayList<>();
try (FSDirectory dir = new SimpleFSDirectory(root);
Closeable closeable = () -> IOUtils.close(toClose)) {
+ assert closeable != null;
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()))
.setUseCompoundFile(false)
.setMergePolicy(NoMergePolicy.INSTANCE) // avoid merging away the randomFile
@@ -3675,10 +3676,14 @@
for (int newMajor = Version.LATEST.major - 1; newMajor <= Version.LATEST.major; newMajor++) {
for (OpenMode openMode : OpenMode.values()) {
try (Directory dir = newDirectory()) {
- try (IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setIndexCreatedVersionMajor(previousMajor))) {}
+ try (IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setIndexCreatedVersionMajor(previousMajor))) {
+   assert w != null;
+ }
SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
assertEquals(previousMajor, infos.getIndexCreatedVersionMajor());
- try (IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setOpenMode(openMode).setIndexCreatedVersionMajor(newMajor))) {}
+ try (IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setOpenMode(openMode).setIndexCreatedVersionMajor(newMajor))) {
+   assert w != null;
+ }
infos = SegmentInfos.readLatestCommit(dir);
if (openMode == OpenMode.CREATE) {
assertEquals(newMajor, infos.getIndexCreatedVersionMajor());


@@ -215,7 +215,7 @@ public class TestConstantScoreScorer extends LuceneTestCase {
}
@Override
- public void close() throws Exception {
+ public void close() throws IOException {
reader.close();
directory.close();
}

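Note: an override may declare narrower checked exceptions than the method it overrides, so close() can tighten AutoCloseable's throws Exception to throws IOException; try-with-resources blocks over this type then only have to surface IOException rather than a bare Exception. A hypothetical sketch:

    import java.io.IOException;

    class NarrowCloseDemo implements AutoCloseable {
      @Override
      public void close() throws IOException { // narrower than 'throws Exception'
        // release IO resources here
      }

      public static void main(String[] args) throws IOException {
        // Only IOException needs declaring here, not Exception.
        try (NarrowCloseDemo demo = new NarrowCloseDemo()) {
          assert demo != null; // same unused-resource idiom as earlier
        }
      }
    }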

@@ -217,7 +217,7 @@ public class TestSearchAfter extends LuceneTestCase {
System.out.println("\nassertQuery " + (iter++) + ": query=" + query + " sort=" + sort + " pageSize=" + pageSize);
}
final boolean doScores;
- final TopDocsCollector allCollector;
+ final TopDocsCollector<?> allCollector;
if (sort == null) {
allCollector = TopScoreDocCollector.create(maxDoc, null, Integer.MAX_VALUE);
doScores = false;
@@ -245,7 +245,7 @@ public class TestSearchAfter extends LuceneTestCase {
ScoreDoc lastBottom = null;
while (pageStart < all.totalHits.value) {
TopDocs paged;
- final TopDocsCollector pagedCollector;
+ final TopDocsCollector<?> pagedCollector;
if (sort == null) {
if (VERBOSE) {
System.out.println(" iter lastBottom=" + lastBottom);

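Note: TopDocsCollector<?> uses an unbounded wildcard because this test only calls topDocs() and never touches the collector's element type; the wildcard records that fact and clears the rawtypes warning without committing to ScoreDoc or FieldDoc. The general pattern, on a hypothetical method:

    import java.util.List;

    class WildcardDemo {
      // A raw 'List items' parameter would draw a rawtypes warning;
      // 'List<?>' accepts a list of anything and stays fully type-checked
      // as long as the method only reads.
      static int sizeOf(List<?> items) {
        return items.size(); // size() does not depend on the element type
      }

      public static void main(String[] args) {
        System.out.println(sizeOf(List.of(1, 2, 3)));
      }
    }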

@@ -128,10 +128,10 @@ public class TestTopDocsCollector extends LuceneTestCase {
try {
IndexSearcher searcher = new IndexSearcher(indexReader, service);
- CollectorManager collectorManager = TopScoreDocCollector.createSharedManager(numResults,
+ CollectorManager<TopScoreDocCollector,TopDocs> collectorManager = TopScoreDocCollector.createSharedManager(numResults,
      null, threshold);
- return (TopDocs) searcher.search(q, collectorManager);
+ return searcher.search(q, collectorManager);
} finally {
service.shutdown();
}
@@ -344,7 +344,7 @@ public class TestTopDocsCollector extends LuceneTestCase {
assertEquals(2, reader.leaves().size());
w.close();
- TopDocsCollector collector = doSearchWithThreshold( 5, 10, q, reader);
+ TopDocsCollector<ScoreDoc> collector = doSearchWithThreshold( 5, 10, q, reader);
TopDocs tdc = doConcurrentSearchWithThreshold(5, 10, q, reader);
TopDocs tdc2 = collector.topDocs();
@@ -556,7 +556,7 @@ public class TestTopDocsCollector extends LuceneTestCase {
.build()
};
for (Query query : queries) {
- TopDocsCollector collector = doSearchWithThreshold(5, 0, query, indexReader);
+ TopDocsCollector<ScoreDoc> collector = doSearchWithThreshold(5, 0, query, indexReader);
TopDocs tdc = doConcurrentSearchWithThreshold(5, 0, query, indexReader);
TopDocs tdc2 = collector.topDocs();
@@ -582,7 +582,6 @@ public class TestTopDocsCollector extends LuceneTestCase {
IndexReader reader = writer.getReader();
writer.close();
- final IndexSearcher s = newSearcher(reader);
Terms terms = MultiTerms.getTerms(reader, "body");
int termCount = 0;
TermsEnum termsEnum = terms.iterator();
@@ -599,7 +598,7 @@ public class TestTopDocsCollector extends LuceneTestCase {
BytesRef term = BytesRef.deepCopyOf(termsEnum.term());
Query query = new TermQuery(new Term("body", term));
- TopDocsCollector collector = doSearchWithThreshold(5, 0, query, reader);
+ TopDocsCollector<ScoreDoc> collector = doSearchWithThreshold(5, 0, query, reader);
TopDocs tdc = doConcurrentSearchWithThreshold(5, 0, query, reader);
TopDocs tdc2 = collector.topDocs();

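Note: two more warning classes are cleared in this file. Parameterizing CollectorManager<TopScoreDocCollector, TopDocs> lets IndexSearcher.search(Query, CollectorManager), which is generic in the manager's result type, return TopDocs directly, so the (TopDocs) cast disappears along with its warning; and the deleted final IndexSearcher s = newSearcher(reader); was simply an unused local. A hypothetical miniature of the cast removal:

    class TypedManagerDemo {
      // Stand-in for CollectorManager's shape: generic in its result type.
      interface Manager<R> {
        R reduce();
      }

      // Stand-in for search(): its return type is the manager's R.
      static <R> R search(Manager<R> manager) {
        return manager.reduce();
      }

      public static void main(String[] args) {
        Manager<String> manager = () -> "3 hits";
        // With a raw Manager the call would return Object and force a cast;
        // with Manager<String>, R is inferred and no cast is needed.
        String result = search(manager);
        System.out.println(result);
      }
    }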

@@ -88,10 +88,10 @@ public class TestTopFieldCollector extends LuceneTestCase {
try {
IndexSearcher searcher = new IndexSearcher(indexReader, service);
- CollectorManager collectorManager = TopFieldCollector.createSharedManager(sort, numResults,
+ CollectorManager<TopFieldCollector,TopFieldDocs> collectorManager = TopFieldCollector.createSharedManager(sort, numResults,
      null, threshold);
- TopDocs tdc = (TopDocs) searcher.search(q, collectorManager);
+ TopDocs tdc = searcher.search(q, collectorManager);
return tdc;
} finally {
@@ -155,10 +155,10 @@ public class TestTopFieldCollector extends LuceneTestCase {
is.search(q, tdc);
- CollectorManager tsdc = TopFieldCollector.createSharedManager(sort[i], 10, null, Integer.MAX_VALUE);
+ CollectorManager<TopFieldCollector,TopFieldDocs> tsdc = TopFieldCollector.createSharedManager(sort[i], 10, null, Integer.MAX_VALUE);
  TopDocs td = tdc.topDocs();
- TopDocs td2 = (TopDocs) concurrentSearcher.search(q, tsdc);
+ TopDocs td2 = concurrentSearcher.search(q, tsdc);
ScoreDoc[] sd = td.scoreDocs;
for(int j = 0; j < sd.length; j++) {
assertTrue(Float.isNaN(sd[j].score));


@@ -177,6 +177,7 @@ public class TestFileSwitchDirectory extends BaseDirectoryTestCase {
Function<String[], Long> stripExtra = array -> Arrays.asList(array).stream()
.filter(f -> f.startsWith("extra") == false).count();
try (IndexInput indexInput = dir.openInput("foo.tim", IOContext.DEFAULT)) {
+ assert indexInput != null;
dir.deleteFile("foo.tim");
assertEquals(1, dir.getPrimaryDir().getPendingDeletions().size());
assertEquals(1, dir.getPendingDeletions().size());


@@ -467,6 +467,7 @@ public abstract class BaseDirectoryTestCase extends LuceneTestCase {
for (int i = 0, max = RandomizedTest.randomIntBetween(500, 1000); i < max; i++) {
String fileName = "file-" + i;
try (IndexOutput output = dir.createOutput(fileName, newIOContext(random()))) {
+ assert output != null;
// Add some lags so that the other thread can read the content of the directory.
Thread.yield();
}
@@ -492,6 +493,7 @@ public abstract class BaseDirectoryTestCase extends LuceneTestCase {
String file = RandomPicks.randomFrom(rnd, files);
try (IndexInput input = dir.openInput(file, newIOContext(random()))) {
// Just open, nothing else.
+ assert input != null;
} catch (AccessDeniedException e) {
// Access denied is allowed for files for which the output is still open (MockDirectoryWriter enforces
// this, for example). Since we don't synchronize with the writer thread, just ignore it.
@@ -1102,11 +1104,13 @@ public abstract class BaseDirectoryTestCase extends LuceneTestCase {
try (Directory dir = getDirectory(createTempDir())) {
String name = "file";
try (IndexOutput out = dir.createOutput(name, IOContext.DEFAULT)) {
+ assert out != null;
}
// Try to create an existing file should fail.
expectThrows(FileAlreadyExistsException.class, () -> {
try (IndexOutput out = dir.createOutput(name, IOContext.DEFAULT)) {
+ assert out != null;
}
});