Enable LongDoubleConversion error-prone check (#12010)

Robert Muir 2022-12-12 20:55:39 -05:00 committed by GitHub
parent e34234ca6c
commit 06f9179295
16 changed files with 50 additions and 40 deletions
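
For context: Error Prone's LongDoubleConversion check flags a long that is implicitly widened to double (most visibly a long argument passed to a double parameter), because the conversion silently loses precision once the value exceeds 2^53. A minimal standalone sketch of the failure mode, not taken from this commit:

    // Hypothetical demo class, not part of this commit.
    public class LongDoubleDemo {
      static double asDouble(double d) {
        return d;
      }

      public static void main(String[] args) {
        long big = (1L << 53) + 1; // 9007199254740993: needs 54 bits
        // Implicit long -> double widening at the call site; this is the
        // pattern the check reports.
        double widened = asDouble(big);
        System.out.println(big == (long) widened); // prints false: the +1 is gone
      }
    }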


@@ -378,7 +378,7 @@ allprojects { prj ->
         // '-Xep:LiteProtoToString:OFF', // we don't use protobuf
         // '-Xep:LockNotBeforeTry:OFF', // TODO: there are problems
         '-Xep:LogicalAssignment:WARN',
-        // '-Xep:LongDoubleConversion:OFF', // TODO: there are problems
+        '-Xep:LongDoubleConversion:WARN',
         '-Xep:LongFloatConversion:WARN',
         '-Xep:LoopOverCharArray:WARN',
         '-Xep:MalformedInlineTag:WARN',


@@ -53,7 +53,8 @@ public class TestLucene60PointsFormat extends BasePointsFormatTestCase {
     return codec;
   }
 
-  @SuppressWarnings("NarrowCalculation")
+  // TODO: clean up the math/estimation here rather than suppress so many warnings
+  @SuppressWarnings({"NarrowCalculation", "LongDoubleConversion"})
   public void testEstimatePointCount() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig();
@@ -179,7 +180,8 @@ public class TestLucene60PointsFormat extends BasePointsFormatTestCase {
   // The tree is always balanced in the N dims case, and leaves are
   // not all full so things are a bit different
-  @SuppressWarnings("NarrowCalculation")
+  // TODO: clean up the math/estimation here rather than suppress so many warnings
+  @SuppressWarnings({"NarrowCalculation", "LongDoubleConversion"})
   public void testEstimatePointCount2Dims() throws IOException {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
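
As in the other points-format tests below, the suppression switches to @SuppressWarnings' array form so one annotation covers both Error Prone check names, scoped to the method. A minimal sketch (hypothetical method, not from Lucene):

    public class SuppressDemo {
      // The array form silences several named checks at once, and only
      // for this method.
      @SuppressWarnings({"NarrowCalculation", "LongDoubleConversion"})
      public double estimate(long totalDocs) {
        // Passing a long where Math.sqrt expects a double is the implicit
        // widening that LongDoubleConversion would otherwise flag here.
        return Math.sqrt(totalDocs);
      }
    }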


@@ -101,7 +101,8 @@ public class TestLucene86PointsFormat extends BasePointsFormatTestCase {
     super.testMergeStability();
   }
 
-  @SuppressWarnings("NarrowCalculation")
+  // TODO: clean up the math/estimation here rather than suppress so many warnings
+  @SuppressWarnings({"NarrowCalculation", "LongDoubleConversion"})
   public void testEstimatePointCount() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig();
@@ -227,7 +228,8 @@ public class TestLucene86PointsFormat extends BasePointsFormatTestCase {
   // The tree is always balanced in the N dims case, and leaves are
   // not all full so things are a bit different
-  @SuppressWarnings("NarrowCalculation")
+  // TODO: clean up the math/estimation here rather than suppress so many warnings
+  @SuppressWarnings({"NarrowCalculation", "LongDoubleConversion"})
   public void testEstimatePointCount2Dims() throws IOException {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());


@@ -147,8 +147,8 @@ public class BooleanPerceptronClassifier implements Classifier<Boolean> {
         Boolean assignedClass = classificationResult.getAssignedClass();
         Boolean correctClass = Boolean.valueOf(classField.stringValue());
-        long modifier = correctClass.compareTo(assignedClass);
-        if (modifier != 0) {
+        double modifier = Math.signum(correctClass.compareTo(assignedClass));
+        if (modifier != 0D) {
           updateWeights(
               indexReader,
               scoreDoc.doc,
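
The classifier fix works because Boolean.compareTo returns an int, and Math.signum collapses it to -1, 0, or 1 in floating point, so the modifier never lives in a long before flowing into the weight update. A standalone sketch of the idiom:

    // Standalone sketch: compareTo yields an int (-1, 0, or 1 here), and
    // Math.signum keeps the modifier in floating point from the start.
    public class SignumDemo {
      public static void main(String[] args) {
        Boolean correctClass = Boolean.TRUE;
        Boolean assignedClass = Boolean.FALSE;
        double modifier = Math.signum(correctClass.compareTo(assignedClass));
        if (modifier != 0D) {
          System.out.println("apply weight update scaled by " + modifier); // 1.0
        }
      }
    }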


@@ -267,8 +267,8 @@ final class LongDistanceFeatureQuery extends Query {
       return doc;
     }
 
-    private float score(double distance) {
-      return (float) (boost * (pivotDistance / (pivotDistance + distance)));
+    private float score(long distance) {
+      return (float) (boost * (pivotDistance / (pivotDistance + (double) distance)));
     }
 
     /**


@@ -494,7 +494,7 @@ public abstract class LogMergePolicy extends MergePolicy {
       }
 
       final SegmentInfoAndLevel infoLevel =
-          new SegmentInfoAndLevel(info, (float) Math.log(size) / norm);
+          new SegmentInfoAndLevel(info, (float) Math.log((double) size) / norm);
       levels.add(infoLevel);
 
       if (verbose(mergeContext)) {
@@ -517,7 +517,7 @@ public abstract class LogMergePolicy extends MergePolicy {
     final float levelFloor;
     if (minMergeSize <= 0) levelFloor = (float) 0.0;
-    else levelFloor = (float) (Math.log(minMergeSize) / norm);
+    else levelFloor = (float) (Math.log((double) minMergeSize) / norm);
 
     // Now, we quantize the log values into levels. The
     // first level is any segment whose log size is within
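
Here, as with the Math.pow calls in TieredMergePolicy and BasicModelIne below, the (double) cast changes nothing at runtime: the long was already being widened to double implicitly, and the cast just records that the conversion is intentional, which satisfies the check. A small sketch with hypothetical values:

    public class CastDemo {
      public static void main(String[] args) {
        long size = 123_456_789L;
        double implicitWidening = Math.log(size);          // previously flagged
        double explicitWidening = Math.log((double) size); // same math, check satisfied
        System.out.println(implicitWidening == explicitWidening); // prints true
      }
    }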


@@ -693,7 +693,7 @@ public class TieredMergePolicy extends MergePolicy {
         // don't want to make this exponent too large else we
         // can end up doing poor merges of small segments in
         // order to avoid the large merges:
-        mergeScore *= Math.pow(totAfterMergeBytes, 0.05);
+        mergeScore *= Math.pow((double) totAfterMergeBytes, 0.05);
 
         // Strongly favor merges that reclaim deletes:
         final double nonDelRatio = ((double) totAfterMergeBytes) / totBeforeMergeBytes;


@@ -34,7 +34,7 @@ public class BasicModelIne extends BasicModel {
   public final double score(BasicStats stats, double tfn, double aeTimes1pTfn) {
     long N = stats.getNumberOfDocuments();
     long F = stats.getTotalTermFreq();
-    double ne = N * (1 - Math.pow((N - 1) / (double) N, F));
+    double ne = N * (1 - Math.pow((N - 1) / (double) N, (double) F));
     double A = log2((N + 1) / (ne + 0.5));
 
     // basic model I(ne) should return A * tfn


@@ -98,7 +98,8 @@ public class TestLucene90PointsFormat extends BasePointsFormatTestCase {
     super.testMergeStability();
   }
 
-  @SuppressWarnings("NarrowCalculation")
+  // TODO: clean up the math/estimation here rather than suppress so many warnings
+  @SuppressWarnings({"NarrowCalculation", "LongDoubleConversion"})
   public void testEstimatePointCount() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig();
@@ -224,7 +225,8 @@ public class TestLucene90PointsFormat extends BasePointsFormatTestCase {
   // The tree is always balanced in the N dims case, and leaves are
   // not all full so things are a bit different
-  @SuppressWarnings("NarrowCalculation")
+  // TODO: clean up the math/estimation here rather than suppress so many warnings
+  @SuppressWarnings({"NarrowCalculation", "LongDoubleConversion"})
   public void testEstimatePointCount2Dims() throws IOException {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());


@@ -541,7 +541,10 @@ public class TestLRUQueryCache extends LuceneTestCase {
     final long actualRamBytesUsed = RamUsageTester.ramUsed(queryCache, acc);
     final long expectedRamBytesUsed = queryCache.ramBytesUsed();
     // error < 30%
-    assertEquals(actualRamBytesUsed, expectedRamBytesUsed, 30.d * actualRamBytesUsed / 100.d);
+    assertEquals(
+        (double) actualRamBytesUsed,
+        (double) expectedRamBytesUsed,
+        30.d * actualRamBytesUsed / 100.d);
 
     reader.close();
     w.close();
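
The assertEquals calls in this test and the next two compare long byte counts with a tolerance, so overload resolution lands on JUnit's assertEquals(double expected, double actual, double delta) and widens the longs implicitly. The explicit (double) casts keep the same overload and behavior while making the widening visible. A sketch with hypothetical counts:

    import static org.junit.Assert.assertEquals;

    public class DeltaAssertDemo {
      public static void main(String[] args) {
        // Hypothetical byte counts: the implicit form assertEquals(expected,
        // actual, delta) behaves identically but gets flagged; the casts
        // spell out the long -> double widening the overload requires.
        long expected = 1_000L;
        long actual = 1_010L;
        assertEquals((double) expected, (double) actual, 30.d * actual / 100.d);
      }
    }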


@@ -238,7 +238,8 @@ public class TestTermInSetQuery extends LuceneTestCase {
     final long actualRamBytesUsed = RamUsageTester.ramUsed(query);
     final long expectedRamBytesUsed = query.ramBytesUsed();
     // error margin within 5%
-    assertEquals(expectedRamBytesUsed, actualRamBytesUsed, actualRamBytesUsed / 20.d);
+    assertEquals(
+        (double) expectedRamBytesUsed, (double) actualRamBytesUsed, actualRamBytesUsed / 20.d);
   }
 
   private static class TermsCountingDirectoryReaderWrapper extends FilterDirectoryReader {


@@ -703,7 +703,7 @@ public class TestPackedInts extends LuceneTestCase {
       }
 
       // test ramBytesUsed
-      assertEquals(RamUsageTester.ramUsed(writer), writer.ramBytesUsed(), 8.d);
+      assertEquals((double) RamUsageTester.ramUsed(writer), (double) writer.ramBytesUsed(), 8.d);
 
       // test copy
       PagedGrowableWriter copy =


@@ -593,8 +593,8 @@ public class TestRangeFacetCounts extends FacetTestCase {
     Document doc = new Document();
     DoubleDocValuesField field = new DoubleDocValuesField("field", 0.0);
     doc.add(field);
-    for (long l = 0; l < 100; l++) {
-      field.setDoubleValue(l);
+    for (int i = 0; i < 100; i++) {
+      field.setDoubleValue(i);
       w.addDocument(doc);
     }
@@ -631,9 +631,9 @@ public class TestRangeFacetCounts extends FacetTestCase {
     SortedNumericDocValuesField field2 = new SortedNumericDocValuesField("field", 0);
     doc.add(field1);
     doc.add(field2);
-    for (long l = 0; l < 100; l++) {
-      field1.setLongValue(NumericUtils.doubleToSortableLong(l));
-      field2.setLongValue(NumericUtils.doubleToSortableLong(l));
+    for (int i = 0; i < 100; i++) {
+      field1.setLongValue(NumericUtils.doubleToSortableLong(i));
+      field2.setLongValue(NumericUtils.doubleToSortableLong(i));
       w.addDocument(doc);
     }
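
Changing the loop counter from long to int is enough here because setDoubleValue and NumericUtils.doubleToSortableLong take a double, and an int widens to double exactly (every int fits in the 53-bit mantissa), so there is no long -> double conversion left to flag. A sketch with a hypothetical setter standing in for the field:

    public class LoopCounterDemo {
      // Hypothetical stand-in for DoubleDocValuesField.setDoubleValue(double):
      static void setDoubleValue(double v) { /* store v */ }

      public static void main(String[] args) {
        // int -> double widening is always exact, so the check has
        // nothing to report:
        for (int i = 0; i < 100; i++) {
          setDoubleValue(i);
        }
      }
    }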


@@ -1943,7 +1943,7 @@ public class TestJoinUtil extends LuceneTestCase {
           document.add(new SortedNumericDocValuesField(fieldName + "LONG", linkLong));
           document.add(
               new SortedNumericDocValuesField(
-                  fieldName + "DOUBLE", Double.doubleToRawLongBits(linkLong)));
+                  fieldName + "DOUBLE", Double.doubleToRawLongBits((double) linkLong)));
         } else {
           document.add(new SortedDocValuesField(fieldName, new BytesRef(linkValue)));
           document.add(new NumericDocValuesField(fieldName + "INT", linkInt));


@@ -301,8 +301,8 @@ public class TestDocValuesStatsCollector extends LuceneTestCase {
       if (stats.count() > 0) {
         DoubleSummaryStatistics sumStats =
             filterAndFlatValues(docValues, (v) -> v != null).summaryStatistics();
-        assertEquals(sumStats.getMax(), stats.max().longValue(), 0.00001);
-        assertEquals(sumStats.getMin(), stats.min().longValue(), 0.00001);
+        assertEquals(sumStats.getMax(), stats.max(), 0.00001);
+        assertEquals(sumStats.getMin(), stats.min(), 0.00001);
         assertEquals(sumStats.getAverage(), stats.mean(), 0.00001);
         assertEquals(sumStats.getSum(), stats.sum().doubleValue(), 0.00001);
         assertEquals(sumStats.getCount(), stats.valuesCount());
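
This fix removes a lossy round trip rather than adding a cast: assuming stats.max() returns a boxed Double here (the surrounding DoubleSummaryStatistics suggests the double-valued stats), .longValue() truncated the fraction and the resulting long was then widened back to double inside assertEquals. A sketch of the difference:

    public class BoxedDoubleDemo {
      public static void main(String[] args) {
        Double max = 3.75;                     // hypothetical stats.max() value
        double roundTripped = max.longValue(); // 3.0: fraction lost, then widened back
        double direct = max;                   // 3.75: compared as-is now
        System.out.println(roundTripped + " vs " + direct); // 3.0 vs 3.75
      }
    }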


@@ -31,8 +31,8 @@ public class TestByteWritesTrackingDirectoryWrapper extends BaseDirectoryTestCase {
   public void testEmptyDir() throws Exception {
     ByteWritesTrackingDirectoryWrapper dir =
         new ByteWritesTrackingDirectoryWrapper(new ByteBuffersDirectory());
-    assertEquals(0.0, dir.getFlushedBytes(), 0.0);
-    assertEquals(0.0, dir.getMergedBytes(), 0.0);
+    assertEquals(0, dir.getFlushedBytes());
+    assertEquals(0, dir.getMergedBytes());
   }
 
   public void testRandomOutput() throws Exception {
@@ -49,8 +49,8 @@ public class TestByteWritesTrackingDirectoryWrapper extends BaseDirectoryTestCase {
       flushBytesArr[i] = (byte) random().nextInt(127);
     }
     output.writeBytes(flushBytesArr, flushBytesArr.length);
-    assertEquals(0.0, dir.getFlushedBytes(), 0.0);
-    assertEquals(0.0, dir.getMergedBytes(), 0.0);
+    assertEquals(0, dir.getFlushedBytes());
+    assertEquals(0, dir.getMergedBytes());
     output.close();
 
     // now merge bytes
@@ -61,12 +61,12 @@ public class TestByteWritesTrackingDirectoryWrapper extends BaseDirectoryTestCase {
       mergeBytesArr[i] = (byte) random().nextInt(127);
     }
     output.writeBytes(mergeBytesArr, mergeBytesArr.length);
-    assertEquals(expectedFlushBytes, dir.getFlushedBytes(), 0.0);
-    assertEquals(0.0, dir.getMergedBytes(), 0.0);
+    assertEquals(expectedFlushBytes, dir.getFlushedBytes());
+    assertEquals(0, dir.getMergedBytes());
     output.close();
 
-    assertEquals(expectedFlushBytes, dir.getFlushedBytes(), 0.0);
-    assertEquals(expectedMergeBytes, dir.getMergedBytes(), 0.0);
+    assertEquals(expectedFlushBytes, dir.getFlushedBytes());
+    assertEquals(expectedMergeBytes, dir.getMergedBytes());
   }
 
   public void testRandomTempOutput() throws Exception {
@@ -83,8 +83,8 @@ public class TestByteWritesTrackingDirectoryWrapper extends BaseDirectoryTestCase {
       flushBytesArr[i] = (byte) random().nextInt(127);
     }
     output.writeBytes(flushBytesArr, flushBytesArr.length);
-    assertEquals(0.0, dir.getFlushedBytes(), 0.0);
-    assertEquals(0.0, dir.getMergedBytes(), 0.0);
+    assertEquals(0, dir.getFlushedBytes());
+    assertEquals(0, dir.getMergedBytes());
     output.close();
 
     // now merge bytes
@@ -96,12 +96,12 @@ public class TestByteWritesTrackingDirectoryWrapper extends BaseDirectoryTestCase {
      mergeBytesArr[i] = (byte) random().nextInt(127);
     }
     output.writeBytes(mergeBytesArr, mergeBytesArr.length);
-    assertEquals(expectedFlushBytes, dir.getFlushedBytes(), 0.0);
-    assertEquals(0.0, dir.getMergedBytes(), 0.0);
+    assertEquals(expectedFlushBytes, dir.getFlushedBytes());
+    assertEquals(0, dir.getMergedBytes());
     output.close();
 
-    assertEquals(expectedFlushBytes, dir.getFlushedBytes(), 0.0);
-    assertEquals(expectedMergeBytes, dir.getMergedBytes(), 0.0);
+    assertEquals(expectedFlushBytes, dir.getFlushedBytes());
+    assertEquals(expectedMergeBytes, dir.getMergedBytes());
   }
 
   @Override
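
Since the flushed- and merged-byte counters here are longs, the delta-based double overload was never needed; assertEquals(long, long) compares exactly and involves no floating point at all. A sketch with hypothetical values:

    import static org.junit.Assert.assertEquals;

    public class ExactAssertDemo {
      public static void main(String[] args) {
        long expectedFlushBytes = 42L; // hypothetical counter values
        long flushedBytes = 42L;
        // Exact long comparison; no widening, no tolerance:
        assertEquals(expectedFlushBytes, flushedBytes);
      }
    }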