Add stricter checking for potential coding errors

Can be used via `mvn clean compile test-compile -P strict`
Charles Allen 2015-04-08 14:11:44 -07:00
parent a609e6bcd5
commit abdeaa0746
20 changed files with 118 additions and 92 deletions
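
Nearly all of the changes below are mechanical: lowercase `l` long-literal suffixes become `L`, and implicit narrowing conversions get explicit casts, both of which error-prone treats as errors under the new profile. A minimal sketch of the literal-suffix problem; the class and the check name (LongLiteralLowerCaseSuffix) are illustrative assumptions, not part of the commit:

// Illustrative only; not part of this commit.
public class LiteralSuffixDemo
{
  public static void main(String[] args)
  {
    // A lowercase 'l' suffix is easily misread as the digit '1', so the
    // strict profile (via error-prone, presumably its
    // LongLiteralLowerCaseSuffix check) rejects it.
    long misleading = 1l; // reads like 11; flagged under -P strict
    long clear = 1L;      // what this commit rewrites every literal to
    System.out.println(misleading + clear);
  }
}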


@@ -150,26 +150,26 @@ public class ApproximateHistogramErrorBenchmark
     float err1 = 0;
     float err2 = 0;
     for (int j = 0; j < hcounts.length; j++) {
-      err1 += Math.abs((hcounts[j] - ahcounts1[j]) / numValues);
-      err2 += Math.abs((hcounts[j] - ahcounts2[j]) / numValues);
+      err1 += (float)Math.abs((hcounts[j] - ahcounts1[j]) / numValues);
+      err2 += (float)Math.abs((hcounts[j] - ahcounts2[j]) / numValues);
     }

     if (debug) {
       float sum = 0;
       for (double v : hcounts) {
-        sum += v;
+        sum += (float)v;
       }
       System.out.println("Exact Histogram Sum:");
       System.out.println(sum);
       sum = 0;
       for (double v : ahcounts1) {
-        sum += v;
+        sum += (float)v;
       }
       System.out.println("Approximate Histogram Sum:");
       System.out.println(sum);
       sum = 0;
       for (double v : ahcounts2) {
-        sum += v;
+        sum += (float)v;
       }
       System.out.println("Approximate Histogram Rule Fold Sum:");
       System.out.println(sum);
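
The `(float)` casts above are needed because a Java compound assignment hides a narrowing conversion: with a `float` on the left and a `double` on the right, `err1 += x` compiles as `err1 = (float) (err1 + x)`, silently dropping precision. This is presumably what error-prone's NarrowingCompoundAssignment check flags; a sketch of the pitfall (illustrative, not from the commit):

// Illustrative only; not part of this commit.
public class NarrowingDemo
{
  public static void main(String[] args)
  {
    float sum = 0;
    double v = 0.1;

    // Hidden cast: compiles as sum = (float) (sum + v), so the
    // precision loss is silent; flagged under -P strict.
    sum += v;
    // The same narrowing, made explicit as in this commit.
    sum += (float) v;

    System.out.println(sum);
  }
}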


@@ -349,7 +349,7 @@ public class IngestSegmentFirehoseFactoryTest
   private static final String DIM_FLOAT_NAME = "testDimFloatName";
   private static final String METRIC_LONG_NAME = "testLongMetric";
   private static final String METRIC_FLOAT_NAME = "testFloatMetric";
-  private static final Long METRIC_LONG_VALUE = 1l;
+  private static final Long METRIC_LONG_VALUE = 1L;
   private static final Float METRIC_FLOAT_VALUE = 1.0f;
   private static final String TIME_COLUMN = "ts";
   private static final Integer MAX_SHARD_NUMBER = 10;
@@ -401,7 +401,7 @@ public class IngestSegmentFirehoseFactoryTest
             MAX_SHARD_NUMBER
         ),
         BINARY_VERSION,
-        0l
+        0L
     );
   }

pom.xml

@@ -711,4 +711,30 @@
         </pluginManagement>
     </build>
+    <profiles>
+        <profile>
+            <id>strict</id>
+            <build>
+                <pluginManagement>
+                    <plugins>
+                        <plugin>
+                            <groupId>org.apache.maven.plugins</groupId>
+                            <artifactId>maven-compiler-plugin</artifactId>
+                            <configuration>
+                                <compilerId>javac-with-errorprone</compilerId>
+                                <forceJavacCompilerUse>true</forceJavacCompilerUse>
+                            </configuration>
+                            <dependencies>
+                                <dependency>
+                                    <groupId>org.codehaus.plexus</groupId>
+                                    <artifactId>plexus-compiler-javac-errorprone</artifactId>
+                                    <version>2.5</version>
+                                </dependency>
+                            </dependencies>
+                        </plugin>
+                    </plugins>
+                </pluginManagement>
+            </build>
+        </profile>
+    </profiles>
 </project>


@@ -27,7 +27,7 @@ public class CountBufferAggregator implements BufferAggregator
   @Override
   public void init(ByteBuffer buf, int position)
   {
-    buf.putLong(position, 0l);
+    buf.putLong(position, 0L);
   }

   @Override


@@ -37,7 +37,7 @@ public class LongSumBufferAggregator implements BufferAggregator
   @Override
   public void init(ByteBuffer buf, int position)
   {
-    buf.putLong(position, 0l);
+    buf.putLong(position, 0L);
   }

   @Override


@@ -279,7 +279,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
       byte lookupVal = ByteBitLookup.lookup[UnsignedBytes.toInt(hashedValue[i])];
       switch (lookupVal) {
         case 0:
-          positionOf1 += 8;
+          positionOf1 += (byte)8;
           continue;
         default:
           positionOf1 += lookupVal;
@@ -640,7 +640,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
   * @param offsetDiff The difference in offset between the byteToAdd and the current HyperLogLogCollector
   * @param byteToAdd The byte to merge into the current HyperLogLogCollector
   */
-  private static int mergeAndStoreByteRegister(
+  private static short mergeAndStoreByteRegister(
       final ByteBuffer storageBuffer,
       final int position,
       final int offsetDiff,
@@ -665,7 +665,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
     storageBuffer.put(position, (byte) ((newUpper | newLower) & 0xff));

-    int numNoLongerZero = 0;
+    short numNoLongerZero = 0;
     if (upperNibble == 0 && newUpper > 0) {
       ++numNoLongerZero;
     }
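
The `byte`/`short` changes in this file are the same narrowing issue in integer form: arithmetic on `byte` and `short` always promotes to `int`, so `positionOf1 += 8` and the test's `numNonZero += a * b` each hide a narrowing cast, while `++numNoLongerZero` (left unchanged above) does not trip the check. Narrowing `mergeAndStoreByteRegister` to return `short` matches what it actually computes. A sketch, again illustrative rather than from the commit:

// Illustrative only; not part of this commit.
public class PromotionDemo
{
  public static void main(String[] args)
  {
    short count = 0;
    short delta = 3;

    ++count; // increment on a short stays a short; not flagged

    // short * short promotes to int, so this compiles as
    // count = (short) (count + delta * delta): a hidden narrowing cast,
    // flagged under -P strict.
    count += delta * delta;
    // The narrowing spelled out, as in the test changes below.
    count += (short) (delta * delta);

    System.out.println(count);
  }
}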


@@ -501,7 +501,7 @@ public class QueryGranularityTest
     json = "{ \"type\": \"period\", \"period\": \"P1D\","
           + "\"timeZone\": \"America/Los_Angeles\", \"origin\": \"1970-01-01T00:00:00Z\"}";
     gran = mapper.readValue(json, QueryGranularity.class);
-    Assert.assertEquals(new PeriodGranularity(new Period("P1D"), new DateTime(0l), DateTimeZone.forID("America/Los_Angeles")), gran);
+    Assert.assertEquals(new PeriodGranularity(new Period("P1D"), new DateTime(0L), DateTimeZone.forID("America/Los_Angeles")), gran);

     QueryGranularity expected = new PeriodGranularity(
         new Period("P1D"),


@@ -33,17 +33,17 @@ public class CountAggregatorTest
     Assert.assertEquals("billy", agg.getName());

-    Assert.assertEquals(0l, agg.get());
-    Assert.assertEquals(0l, agg.get());
-    Assert.assertEquals(0l, agg.get());
+    Assert.assertEquals(0L, agg.get());
+    Assert.assertEquals(0L, agg.get());
+    Assert.assertEquals(0L, agg.get());
     agg.aggregate();
-    Assert.assertEquals(1l, agg.get());
-    Assert.assertEquals(1l, agg.get());
-    Assert.assertEquals(1l, agg.get());
+    Assert.assertEquals(1L, agg.get());
+    Assert.assertEquals(1L, agg.get());
+    Assert.assertEquals(1L, agg.get());
     agg.aggregate();
-    Assert.assertEquals(2l, agg.get());
-    Assert.assertEquals(2l, agg.get());
-    Assert.assertEquals(2l, agg.get());
+    Assert.assertEquals(2L, agg.get());
+    Assert.assertEquals(2L, agg.get());
+    Assert.assertEquals(2L, agg.get());
   }

   @Test


@@ -40,17 +40,17 @@ public class LongSumAggregatorTest
     Assert.assertEquals("billy", agg.getName());

-    Assert.assertEquals(0l, agg.get());
-    Assert.assertEquals(0l, agg.get());
-    Assert.assertEquals(0l, agg.get());
+    Assert.assertEquals(0L, agg.get());
+    Assert.assertEquals(0L, agg.get());
+    Assert.assertEquals(0L, agg.get());
     aggregate(selector, agg);
-    Assert.assertEquals(24l, agg.get());
-    Assert.assertEquals(24l, agg.get());
-    Assert.assertEquals(24l, agg.get());
+    Assert.assertEquals(24L, agg.get());
+    Assert.assertEquals(24L, agg.get());
+    Assert.assertEquals(24L, agg.get());
     aggregate(selector, agg);
-    Assert.assertEquals(44l, agg.get());
-    Assert.assertEquals(44l, agg.get());
-    Assert.assertEquals(44l, agg.get());
+    Assert.assertEquals(44L, agg.get());
+    Assert.assertEquals(44L, agg.get());
+    Assert.assertEquals(44L, agg.get());
   }

   @Test


@@ -482,7 +482,7 @@ public class HyperLogLogCollectorTest
     }
     final short numNonZeroInRemaining = computeNumNonZero((byte) remainingBytes);
-    numNonZero += (HyperLogLogCollector.NUM_BYTES_FOR_BUCKETS - initialBytes.length) * numNonZeroInRemaining;
+    numNonZero += (short)((HyperLogLogCollector.NUM_BYTES_FOR_BUCKETS - initialBytes.length) * numNonZeroInRemaining);

     ByteBuffer biggerOffset = ByteBuffer.allocate(HyperLogLogCollector.getLatestNumBytesForDenseStorage());
     biggerOffset.put(HLLCV1.VERSION);
@@ -577,7 +577,7 @@ public class HyperLogLogCollectorTest
   @Test
   public void testEstimation() throws Exception
   {
-    Random random = new Random(0l);
+    Random random = new Random(0L);

     final int[] valsToCheck = {10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000, 2000000};
     final double[] expectedVals = {
@@ -603,7 +603,7 @@ public class HyperLogLogCollectorTest
   @Test
   public void testEstimationReadOnlyByteBuffers() throws Exception
   {
-    Random random = new Random(0l);
+    Random random = new Random(0L);

     final int[] valsToCheck = {10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000, 2000000};
     final double[] expectedVals = {
@@ -633,7 +633,7 @@ public class HyperLogLogCollectorTest
   @Test
   public void testEstimationLimitDifferentFromCapacity() throws Exception
   {
-    Random random = new Random(0l);
+    Random random = new Random(0L);

     final int[] valsToCheck = {10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000, 2000000};
     final double[] expectedVals = {


@@ -34,7 +34,7 @@ public class HyperUniqueFinalizingPostAggregatorTest
   @Test
   public void testCompute() throws Exception
   {
-    Random random = new Random(0l);
+    Random random = new Random(0L);
     HyperUniqueFinalizingPostAggregator postAggregator = new HyperUniqueFinalizingPostAggregator(
         "uniques"
     );


@@ -1210,8 +1210,8 @@ public class GroupByQueryRunnerTest
   public void testPostAggMergedHavingSpec()
   {
     List<Row> expectedResults = Arrays.asList(
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "index", 4420L, QueryRunnerTestHelper.addRowsIndexConstantMetric,(double) (6l+4420l+1l)),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "index", 4416L, QueryRunnerTestHelper.addRowsIndexConstantMetric, (double) (6l+4416l+1l))
+        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "index", 4420L, QueryRunnerTestHelper.addRowsIndexConstantMetric,(double) (6L+4420L+1L)),
+        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "index", 4416L, QueryRunnerTestHelper.addRowsIndexConstantMetric, (double) (6L+4416L+1L))
     );

     GroupByQuery.Builder builder = GroupByQuery
@@ -1274,7 +1274,7 @@ public class GroupByQueryRunnerTest
             "index",
             4420L,
             QueryRunnerTestHelper.addRowsIndexConstantMetric,
-            (double) (6l + 4420l + 1l)
+            (double) (6L + 4420L + 1L)
         ),
         GroupByQueryRunnerTestHelper.createExpectedRow(
             "2011-04-01",
@@ -1285,7 +1285,7 @@ public class GroupByQueryRunnerTest
             "index",
             4416L,
             QueryRunnerTestHelper.addRowsIndexConstantMetric,
-            (double) (6l + 4416l + 1l)
+            (double) (6L + 4416L + 1L)
         )
     );


@@ -131,7 +131,7 @@ public class BenchmarkIndexibleWrites extends AbstractBenchmark
   private static Boolean wasCopying(Long val)
   {
-    return (val & 1l) > 0;
+    return (val & 1L) > 0;
   }

   @Override


@@ -182,7 +182,7 @@ public class IncrementalIndexTest
     for (int i = 0; i < dimensionCount; i++) {
       String dimName = String.format("Dim_%d", i);
       dimensionList.add(dimName);
-      builder.put(dimName, (Long) 1l);
+      builder.put(dimName, (Long) 1L);
     }
     return new MapBasedInputRow(timestamp, dimensionList, builder.build());
   }


@@ -133,7 +133,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-01").toString(),
             "dim", "foo",
             "dim.geo", "0.0,0.0",
-            "val", 17l
+            "val", 17L
         )
     )
 );
@@ -145,7 +145,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-02").toString(),
             "dim", "foo",
             "dim.geo", "1.0,3.0",
-            "val", 29l
+            "val", 29L
         )
     )
 );
@@ -157,7 +157,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-03").toString(),
             "dim", "foo",
             "dim.geo", "4.0,2.0",
-            "val", 13l
+            "val", 13L
         )
     )
 );
@@ -169,7 +169,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-04").toString(),
             "dim", "foo",
             "dim.geo", "7.0,3.0",
-            "val", 91l
+            "val", 91L
         )
     )
 );
@@ -181,7 +181,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-05").toString(),
             "dim", "foo",
             "dim.geo", "8.0,6.0",
-            "val", 47l
+            "val", 47L
         )
     )
 );
@@ -193,7 +193,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-05").toString(),
             "dim", "foo",
             "dim.geo", "_mmx.unknown",
-            "val", 501l
+            "val", 501L
         )
     )
 );
@@ -306,7 +306,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-01").toString(),
             "dim", "foo",
             "dim.geo", "0.0,0.0",
-            "val", 17l
+            "val", 17L
         )
     )
 );
@@ -318,7 +318,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-02").toString(),
             "dim", "foo",
             "dim.geo", "1.0,3.0",
-            "val", 29l
+            "val", 29L
         )
     )
 );
@@ -330,7 +330,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-03").toString(),
             "dim", "foo",
             "dim.geo", "4.0,2.0",
-            "val", 13l
+            "val", 13L
         )
     )
 );
@@ -342,7 +342,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-05").toString(),
             "dim", "foo",
             "dim.geo", "_mmx.unknown",
-            "val", 501l
+            "val", 501L
         )
     )
 );
@@ -354,7 +354,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-04").toString(),
             "dim", "foo",
             "dim.geo", "7.0,3.0",
-            "val", 91l
+            "val", 91L
         )
     )
 );
@@ -366,7 +366,7 @@ public class SpatialFilterBonusTest
             "timestamp", new DateTime("2013-01-05").toString(),
             "dim", "foo",
             "dim.geo", "8.0,6.0",
-            "val", 47l
+            "val", 47L
         )
     )
 );
@@ -456,7 +456,7 @@ public class SpatialFilterBonusTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 3L)
-                .put("val", 59l)
+                .put("val", 59L)
                 .build()
         )
     )
@@ -508,7 +508,7 @@ public class SpatialFilterBonusTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 17l)
+                .put("val", 17L)
                 .build()
         )
     ),
@@ -517,7 +517,7 @@ public class SpatialFilterBonusTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 29l)
+                .put("val", 29L)
                 .build()
         )
     ),
@@ -526,7 +526,7 @@ public class SpatialFilterBonusTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 13l)
+                .put("val", 13L)
                 .build()
         )
     ),
@@ -535,7 +535,7 @@ public class SpatialFilterBonusTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 91l)
+                .put("val", 91L)
                 .build()
         )
     ),
@@ -544,7 +544,7 @@ public class SpatialFilterBonusTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 47l)
+                .put("val", 47L)
                 .build()
         )
     )


@@ -136,7 +136,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 0.0f,
             "long", 0.0f,
-            "val", 17l
+            "val", 17L
         )
     )
 );
@@ -149,7 +149,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 1.0f,
             "long", 3.0f,
-            "val", 29l
+            "val", 29L
         )
     )
 );
@@ -162,7 +162,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 4.0f,
             "long", 2.0f,
-            "val", 13l
+            "val", 13L
         )
     )
 );
@@ -175,7 +175,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 7.0f,
             "long", 3.0f,
-            "val", 91l
+            "val", 91L
         )
     )
 );
@@ -188,7 +188,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 8.0f,
             "long", 6.0f,
-            "val", 47l
+            "val", 47L
         )
     )
 );
@@ -201,7 +201,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", "_mmx.unknown",
             "long", "_mmx.unknown",
-            "val", 101l
+            "val", 101L
         )
     )
 );
@@ -213,7 +213,7 @@ public class SpatialFilterTest
             "timestamp", new DateTime("2013-01-05").toString(),
             "dim", "foo",
             "dim.geo", "_mmx.unknown",
-            "val", 501l
+            "val", 501L
         )
     )
 );
@@ -225,7 +225,7 @@ public class SpatialFilterTest
             "timestamp", new DateTime("2013-01-05").toString(),
             "lat2", 0.0f,
             "long2", 0.0f,
-            "val", 13l
+            "val", 13L
         )
     )
 );
@@ -349,7 +349,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 0.0f,
             "long", 0.0f,
-            "val", 17l
+            "val", 17L
         )
     )
 );
@@ -362,7 +362,7 @@ public class SpatialFilterTest
            "dim", "foo",
            "lat", 1.0f,
            "long", 3.0f,
-           "val", 29l
+           "val", 29L
         )
     )
 );
@@ -375,7 +375,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 4.0f,
             "long", 2.0f,
-            "val", 13l
+            "val", 13L
         )
     )
 );
@@ -388,7 +388,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", "_mmx.unknown",
             "long", "_mmx.unknown",
-            "val", 101l
+            "val", 101L
         )
     )
 );
@@ -400,7 +400,7 @@ public class SpatialFilterTest
             "timestamp", new DateTime("2013-01-05").toString(),
             "dim", "foo",
             "dim.geo", "_mmx.unknown",
-            "val", 501l
+            "val", 501L
         )
     )
 );
@@ -413,7 +413,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 7.0f,
             "long", 3.0f,
-            "val", 91l
+            "val", 91L
         )
     )
 );
@@ -426,7 +426,7 @@ public class SpatialFilterTest
             "dim", "foo",
             "lat", 8.0f,
             "long", 6.0f,
-            "val", 47l
+            "val", 47L
         )
     )
 );
@@ -438,7 +438,7 @@ public class SpatialFilterTest
             "timestamp", new DateTime("2013-01-05").toString(),
             "lat2", 0.0f,
             "long2", 0.0f,
-            "val", 13l
+            "val", 13L
         )
     )
 );
@@ -532,7 +532,7 @@ public class SpatialFilterTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 3L)
-                .put("val", 59l)
+                .put("val", 59L)
                 .build()
         )
     )
@@ -585,7 +585,7 @@ public class SpatialFilterTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 13l)
+                .put("val", 13L)
                 .build()
         )
     )
@@ -637,7 +637,7 @@ public class SpatialFilterTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 17l)
+                .put("val", 17L)
                 .build()
         )
     ),
@@ -646,7 +646,7 @@ public class SpatialFilterTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 29l)
+                .put("val", 29L)
                 .build()
         )
     ),
@@ -655,7 +655,7 @@ public class SpatialFilterTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 13l)
+                .put("val", 13L)
                 .build()
         )
     ),
@@ -664,7 +664,7 @@ public class SpatialFilterTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 91l)
+                .put("val", 91L)
                 .build()
         )
     ),
@@ -673,7 +673,7 @@ public class SpatialFilterTest
         new TimeseriesResultValue(
             ImmutableMap.<String, Object>builder()
                 .put("rows", 1L)
-                .put("val", 47l)
+                .put("val", 47L)
                 .build()
         )
     )


@@ -153,10 +153,10 @@ public class IncrementalIndexStorageAdapterTest
     Assert.assertEquals(2, results.size());

     MapBasedRow row = (MapBasedRow) results.get(0);
-    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1l), row.getEvent());
+    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());

     row = (MapBasedRow) results.get(1);
-    Assert.assertEquals(ImmutableMap.of("sally", "bo", "cnt", 1l), row.getEvent());
+    Assert.assertEquals(ImmutableMap.of("sally", "bo", "cnt", 1L), row.getEvent());
   }

   @Test
@@ -211,10 +211,10 @@ public class IncrementalIndexStorageAdapterTest
     Assert.assertEquals(2, results.size());

     MapBasedRow row = (MapBasedRow) results.get(0);
-    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1l, "fieldLength", 2.0), row.getEvent());
+    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L, "fieldLength", 2.0), row.getEvent());

     row = (MapBasedRow) results.get(1);
-    Assert.assertEquals(ImmutableMap.of("billy", "hip", "sally", "hop", "cnt", 1l, "fieldLength", 6.0), row.getEvent());
+    Assert.assertEquals(ImmutableMap.of("billy", "hip", "sally", "hop", "cnt", 1L, "fieldLength", 6.0), row.getEvent());
   }

   private static GroupByQueryEngine makeGroupByQueryEngine()
@@ -382,6 +382,6 @@ public class IncrementalIndexStorageAdapterTest
     Assert.assertEquals(1, results.size());

     MapBasedRow row = (MapBasedRow) results.get(0);
-    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1l), row.getEvent());
+    Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());
   }
 }


@@ -33,7 +33,7 @@ public class StorageLocationTest
   @Test
   public void testStorageLocation() throws Exception
   {
-    long expectedAvail = 1000l;
+    long expectedAvail = 1000L;
     StorageLocation loc = new StorageLocation(new File("/tmp"), expectedAvail);
     verifyLoc(expectedAvail, loc);


@@ -464,7 +464,7 @@ public class ServerManagerTest
           Arrays.asList("metric1", "metric2"),
           new NoneShardSpec(),
           IndexIO.CURRENT_VERSION_ID,
-          123l
+          123L
       )
   );
 }
@@ -486,7 +486,7 @@ public class ServerManagerTest
           Arrays.asList("metric1", "metric2"),
           new NoneShardSpec(),
           IndexIO.CURRENT_VERSION_ID,
-          123l
+          123L
       )
   );
 }


@@ -232,7 +232,7 @@ public class ZkCoordinatorTest extends CuratorTestBase
         Arrays.asList("metric1", "metric2"),
         new NoneShardSpec(),
         IndexIO.CURRENT_VERSION_ID,
-        123l
+        123L
     );
 }