Prohibit jackson ObjectMapper#reader methods which are deprecated (#6386)

* Prohibit jackson ObjectMapper#reader methods which are deprecated

* address comments
This commit is contained in:
QiuMM 2018-10-04 04:55:20 +08:00 committed by Roman Leventov
parent 3ae563263a
commit 0b8085aff7
25 changed files with 38 additions and 35 deletions

View File

@ -8,4 +8,7 @@ org.apache.commons.io.FileUtils#getTempDirectory() @ Use org.junit.rules.Tempora
java.util.LinkedList @ Use ArrayList or ArrayDeque instead
com.google.common.collect.Lists#newLinkedList() @ Use ArrayList or ArrayDeque instead
com.google.common.collect.Lists#newLinkedList(java.lang.Iterable) @ Use ArrayList or ArrayDeque instead
java.util.Random#<init>() @ Use ThreadLocalRandom.current() or the constructor with a seed (the latter in tests only!)
com.fasterxml.jackson.databind.ObjectMapper#reader(com.fasterxml.jackson.databind.JavaType) @ Use ObjectMapper#readerFor instead
com.fasterxml.jackson.databind.ObjectMapper#reader(java.lang.Class) @ Use ObjectMapper#readerFor instead
com.fasterxml.jackson.databind.ObjectMapper#reader(com.fasterxml.jackson.core.type.TypeReference) @ Use ObjectMapper#readerFor instead

View File

@ -218,7 +218,7 @@ public class WhiteListBasedDruidToTimelineEventConverter implements DruidToTimel
} else {
fileContent = Files.asCharSource(new File(mapPath), StandardCharsets.UTF_8).read();
}
return mapper.reader(new TypeReference<ImmutableSortedMap<String, ImmutableList<String>>>()
return mapper.readerFor(new TypeReference<ImmutableSortedMap<String, ImmutableList<String>>>()
{
}).readValue(fileContent);
}

View File

@ -60,7 +60,7 @@ public class AmbariMetricsEmitterConfigTest
500L,
400L
);
AmbariMetricsEmitterConfig serde = mapper.reader(AmbariMetricsEmitterConfig.class).readValue(
AmbariMetricsEmitterConfig serde = mapper.readerFor(AmbariMetricsEmitterConfig.class).readValue(
mapper.writeValueAsBytes(config)
);
Assert.assertEquals(config, serde);
@ -70,8 +70,8 @@ public class AmbariMetricsEmitterConfigTest
public void testSerDeDruidToTimelineEventConverter() throws IOException
{
SendAllTimelineEventConverter sendAllConverter = new SendAllTimelineEventConverter("prefix", "druid");
DruidToTimelineMetricConverter serde = mapper.reader(DruidToTimelineMetricConverter.class)
.readValue(mapper.writeValueAsBytes(sendAllConverter));
DruidToTimelineMetricConverter serde = mapper.readerFor(DruidToTimelineMetricConverter.class)
.readValue(mapper.writeValueAsBytes(sendAllConverter));
Assert.assertEquals(sendAllConverter, serde);
WhiteListBasedDruidToTimelineEventConverter whiteListBasedDruidToTimelineEventConverter = new WhiteListBasedDruidToTimelineEventConverter(
@ -80,9 +80,9 @@ public class AmbariMetricsEmitterConfigTest
"",
new DefaultObjectMapper()
);
serde = mapper.reader(DruidToTimelineMetricConverter.class)
.readValue(mapper.writeValueAsBytes(
whiteListBasedDruidToTimelineEventConverter));
serde = mapper.readerFor(DruidToTimelineMetricConverter.class)
.readValue(mapper.writeValueAsBytes(
whiteListBasedDruidToTimelineEventConverter));
Assert.assertEquals(whiteListBasedDruidToTimelineEventConverter, serde);
}
}

View File

@ -282,7 +282,7 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter
} else {
fileContent = Files.asCharSource(new File(mapPath), Charset.forName("UTF-8")).read();
}
return mapper.reader(new TypeReference<ImmutableSortedMap<String, ImmutableSet<String>>>()
return mapper.readerFor(new TypeReference<ImmutableSortedMap<String, ImmutableSet<String>>>()
{
}).readValue(fileContent);
}

View File

@ -59,7 +59,7 @@ public class GraphiteEmitterConfigTest
null
);
String graphiteEmitterConfigString = mapper.writeValueAsString(graphiteEmitterConfig);
GraphiteEmitterConfig graphiteEmitterConfigExpected = mapper.reader(GraphiteEmitterConfig.class).readValue(
GraphiteEmitterConfig graphiteEmitterConfigExpected = mapper.readerFor(GraphiteEmitterConfig.class).readValue(
graphiteEmitterConfigString
);
Assert.assertEquals(graphiteEmitterConfigExpected, graphiteEmitterConfig);
@ -75,7 +75,7 @@ public class GraphiteEmitterConfigTest
false
);
String noopGraphiteEventConverterString = mapper.writeValueAsString(sendAllGraphiteEventConverter);
DruidToGraphiteEventConverter druidToGraphiteEventConverter = mapper.reader(DruidToGraphiteEventConverter.class)
DruidToGraphiteEventConverter druidToGraphiteEventConverter = mapper.readerFor(DruidToGraphiteEventConverter.class)
.readValue(noopGraphiteEventConverterString);
Assert.assertEquals(druidToGraphiteEventConverter, sendAllGraphiteEventConverter);
@ -88,7 +88,7 @@ public class GraphiteEmitterConfigTest
new DefaultObjectMapper()
);
String whiteListBasedConverterString = mapper.writeValueAsString(whiteListBasedConverter);
druidToGraphiteEventConverter = mapper.reader(DruidToGraphiteEventConverter.class)
druidToGraphiteEventConverter = mapper.readerFor(DruidToGraphiteEventConverter.class)
.readValue(whiteListBasedConverterString);
Assert.assertEquals(druidToGraphiteEventConverter, whiteListBasedConverter);
}

View File

@ -48,7 +48,7 @@ public class KafkaEmitterConfigTest
.put("testKey", "testValue").build()
);
String kafkaEmitterConfigString = mapper.writeValueAsString(kafkaEmitterConfig);
KafkaEmitterConfig kafkaEmitterConfigExpected = mapper.reader(KafkaEmitterConfig.class)
KafkaEmitterConfig kafkaEmitterConfigExpected = mapper.readerFor(KafkaEmitterConfig.class)
.readValue(kafkaEmitterConfigString);
Assert.assertEquals(kafkaEmitterConfigExpected, kafkaEmitterConfig);
}

View File

@ -104,7 +104,7 @@ public class EventConverter
log.info("Using default metric map located at [%s]", metricMapPath);
is = new FileInputStream(new File(metricMapPath));
}
return mapper.reader(new TypeReference<Map<String, Set<String>>>()
return mapper.readerFor(new TypeReference<Map<String, Set<String>>>()
{
}).readValue(is);
}

View File

@ -41,7 +41,7 @@ public class OpentsdbEmitterConfigTest
{
OpentsdbEmitterConfig opentsdbEmitterConfig = new OpentsdbEmitterConfig("localhost", 9999, 2000, 2000, 200, 2000, 10000L, null);
String opentsdbEmitterConfigString = mapper.writeValueAsString(opentsdbEmitterConfig);
OpentsdbEmitterConfig expectedOpentsdbEmitterConfig = mapper.reader(OpentsdbEmitterConfig.class)
OpentsdbEmitterConfig expectedOpentsdbEmitterConfig = mapper.readerFor(OpentsdbEmitterConfig.class)
.readValue(opentsdbEmitterConfigString);
Assert.assertEquals(expectedOpentsdbEmitterConfig, opentsdbEmitterConfig);
}

View File

@ -47,7 +47,7 @@ public class OpentsdbEventTest
tags.put("baz", 1);
OpentsdbEvent opentsdbEvent = new OpentsdbEvent("foo.bar", 1000L, 20, tags);
String opentsdbString = mapper.writeValueAsString(opentsdbEvent);
OpentsdbEvent expectedOpentsdbEvent = mapper.reader(OpentsdbEvent.class)
OpentsdbEvent expectedOpentsdbEvent = mapper.readerFor(OpentsdbEvent.class)
.readValue(opentsdbString);
Assert.assertEquals(expectedOpentsdbEvent, opentsdbEvent);
}

View File

@ -85,7 +85,7 @@ public class DimensionConverter
log.info("Using metric dimensions at types at [%s]", dimensionMapPath);
is = new FileInputStream(new File(dimensionMapPath));
}
return mapper.reader(new TypeReference<Map<String, StatsDMetric>>()
return mapper.readerFor(new TypeReference<Map<String, StatsDMetric>>()
{
}).readValue(is);
}

View File

@ -35,7 +35,7 @@ public class HdfsKerberosConfigTest
HdfsKerberosConfig hdfsKerberosConfig = new HdfsKerberosConfig("principal", "keytab");
Assert.assertEquals(
hdfsKerberosConfig,
mapper.reader(HdfsKerberosConfig.class).readValue(mapper.writeValueAsString(hdfsKerberosConfig))
mapper.readerFor(HdfsKerberosConfig.class).readValue(mapper.writeValueAsString(hdfsKerberosConfig))
);
}

View File

@ -126,7 +126,7 @@ public class LoadingLookupFactoryTest
mapper.registerSubtypes(LoadingLookupFactory.class);
Assert.assertEquals(
loadingLookupFactory,
mapper.reader(LookupExtractorFactory.class)
mapper.readerFor(LookupExtractorFactory.class)
.readValue(mapper.writeValueAsString(loadingLookupFactory))
);
}

View File

@ -67,7 +67,7 @@ public class PollingLookupSerDeserTest
PollingLookupFactory pollingLookupFactory = new PollingLookupFactory(Period.ZERO, dataFetcher, cacheFactory);
mapper.registerSubtypes(MockDataFetcher.class);
mapper.registerSubtypes(PollingLookupFactory.class);
Assert.assertEquals(pollingLookupFactory, mapper.reader(LookupExtractorFactory.class).readValue(mapper.writeValueAsString(pollingLookupFactory)));
Assert.assertEquals(pollingLookupFactory, mapper.readerFor(LookupExtractorFactory.class).readValue(mapper.writeValueAsString(pollingLookupFactory)));
}
@JsonTypeName("mock")

View File

@ -160,8 +160,8 @@ public class LoadingCacheTest
public void testSerDeser() throws IOException
{
ObjectMapper mapper = new DefaultObjectMapper();
Assert.assertEquals(loadingCache, mapper.reader(LoadingCache.class).readValue(mapper.writeValueAsString(loadingCache)));
Assert.assertTrue(loadingCache.hashCode() == mapper.reader(LoadingCache.class).readValue(mapper.writeValueAsString(loadingCache)).hashCode());
Assert.assertEquals(loadingCache, mapper.readerFor(LoadingCache.class).readValue(mapper.writeValueAsString(loadingCache)));
Assert.assertEquals(loadingCache.hashCode(), mapper.readerFor(LoadingCache.class).readValue(mapper.writeValueAsString(loadingCache)).hashCode());
}
}

View File

@ -145,7 +145,7 @@ public class JdbcDataFetcherTest
100);
DefaultObjectMapper mapper = new DefaultObjectMapper();
String jdbcDataFetcherSer = mapper.writeValueAsString(jdbcDataFetcher);
Assert.assertEquals(jdbcDataFetcher, mapper.reader(DataFetcher.class).readValue(jdbcDataFetcherSer));
Assert.assertEquals(jdbcDataFetcher, mapper.readerFor(DataFetcher.class).readValue(jdbcDataFetcherSer));
}
private void assertMapLookup(Map<String, String> map, DataFetcher dataFetcher)

View File

@ -35,7 +35,7 @@ public class HadoopKerberosConfigTest
HadoopKerberosConfig hadoopKerberosConfig = new HadoopKerberosConfig("principal", "keytab");
Assert.assertEquals(
hadoopKerberosConfig,
mapper.reader(HadoopKerberosConfig.class).readValue(mapper.writeValueAsString(hadoopKerberosConfig))
mapper.readerFor(HadoopKerberosConfig.class).readValue(mapper.writeValueAsString(hadoopKerberosConfig))
);
}
}

View File

@ -55,7 +55,7 @@ public class MapLookupExtractionFnSerDeTest
@Test
public void testDeserialization() throws IOException
{
final DimExtractionFn fn = mapper.reader(DimExtractionFn.class).readValue(
final DimExtractionFn fn = mapper.readerFor(DimExtractionFn.class).readValue(
StringUtils.format(
"{\"type\":\"lookup\",\"lookup\":{\"type\":\"map\", \"map\":%s}}",
mapper.writeValueAsString(renames)
@ -68,7 +68,7 @@ public class MapLookupExtractionFnSerDeTest
Assert.assertEquals(null, fn.apply(crazyString));
Assert.assertEquals(
crazyString, mapper.reader(DimExtractionFn.class).<DimExtractionFn>readValue(
crazyString, mapper.readerFor(DimExtractionFn.class).<DimExtractionFn>readValue(
StringUtils.format(
"{\"type\":\"lookup\",\"lookup\":{\"type\":\"map\", \"map\":%s}, \"retainMissingValue\":true}",
mapper.writeValueAsString(renames)

View File

@ -82,7 +82,7 @@ public class BoundDimFilterTest
Injector defaultInjector = GuiceInjectors.makeStartupInjector();
ObjectMapper mapper = defaultInjector.getInstance(Key.get(ObjectMapper.class, Json.class));
String serBetweenDimFilter = mapper.writeValueAsString(boundDimFilter);
BoundDimFilter actualBoundDimFilter = mapper.reader(DimFilter.class).readValue(serBetweenDimFilter);
BoundDimFilter actualBoundDimFilter = mapper.readerFor(DimFilter.class).readValue(serBetweenDimFilter);
Assert.assertEquals(boundDimFilter, actualBoundDimFilter);
}

View File

@ -48,7 +48,7 @@ public class InDimFilterSerDesrTest
@Test
public void testDeserialization() throws IOException
{
final InDimFilter actualInDimFilter = mapper.reader(DimFilter.class).readValue(actualInFilter);
final InDimFilter actualInDimFilter = mapper.readerFor(DimFilter.class).readValue(actualInFilter);
final InDimFilter expectedInDimFilter = new InDimFilter("dimTest", Arrays.asList("good", "bad"), null);
Assert.assertEquals(expectedInDimFilter, actualInDimFilter);
}

View File

@ -58,7 +58,7 @@ public class IntervalDimFilterTest
null
);
String filterStr = mapper.writeValueAsString(intervalFilter);
IntervalDimFilter actualFilter = mapper.reader(DimFilter.class).readValue(filterStr);
IntervalDimFilter actualFilter = mapper.readerFor(DimFilter.class).readValue(filterStr);
Assert.assertEquals(intervalFilter, actualFilter);
intervalFilter = new IntervalDimFilter(
@ -71,7 +71,7 @@ public class IntervalDimFilterTest
);
filterStr = mapper.writeValueAsString(intervalFilter);
actualFilter = mapper.reader(DimFilter.class).readValue(filterStr);
actualFilter = mapper.readerFor(DimFilter.class).readValue(filterStr);
Assert.assertEquals(intervalFilter, actualFilter);
}

View File

@ -41,7 +41,7 @@ public class LookupConfigTest
LookupConfig lookupConfig = new LookupConfig(temporaryFolder.newFile().getAbsolutePath());
Assert.assertEquals(
lookupConfig,
mapper.reader(LookupConfig.class).readValue(mapper.writeValueAsString(lookupConfig))
mapper.readerFor(LookupConfig.class).readValue(mapper.writeValueAsString(lookupConfig))
);
}

View File

@ -61,7 +61,7 @@ public class LookupExtractorTest
public void testSerDes() throws IOException
{
ObjectMapper mapper = new DefaultObjectMapper();
Assert.assertEquals(lookupExtractor, mapper.reader(LookupExtractor.class).readValue(mapper.writeValueAsBytes(lookupExtractor)));
Assert.assertEquals(lookupExtractor, mapper.readerFor(LookupExtractor.class).readValue(mapper.writeValueAsBytes(lookupExtractor)));
}
@Test

View File

@ -78,7 +78,7 @@ public class LookupDimensionSpecTest
LOOKUP_REF_MANAGER
);
String serLookup = mapper.writeValueAsString(lookupDimSpec);
Assert.assertEquals(lookupDimSpec, mapper.reader(DimensionSpec.class).with(injectableValues).readValue(serLookup));
Assert.assertEquals(lookupDimSpec, mapper.readerFor(DimensionSpec.class).with(injectableValues).readValue(serLookup));
}
private Object[] parametersForTestSerDesr()

View File

@ -57,6 +57,6 @@ public class MapLookupExtractorFactoryTest
ObjectMapper mapper = new DefaultObjectMapper();
mapper.registerSubtypes(MapLookupExtractorFactory.class);
LookupExtractorFactory lookupExtractorFactory = new MapLookupExtractorFactory(ImmutableMap.of("key", "value"), true);
Assert.assertEquals(lookupExtractorFactory, mapper.reader(LookupExtractorFactory.class).readValue(mapper.writeValueAsString(lookupExtractorFactory)));
Assert.assertEquals(lookupExtractorFactory, mapper.readerFor(LookupExtractorFactory.class).readValue(mapper.writeValueAsString(lookupExtractorFactory)));
}
}

View File

@ -106,7 +106,7 @@ public class DefaultOfflineAppenderatorFactoryTest
)
);
ObjectMapper objectMapper = injector.getInstance(ObjectMapper.class);
AppenderatorFactory defaultOfflineAppenderatorFactory = objectMapper.reader(AppenderatorFactory.class)
AppenderatorFactory defaultOfflineAppenderatorFactory = objectMapper.readerFor(AppenderatorFactory.class)
.readValue("{\"type\":\"offline\"}");
final Map<String, Object> parserMap = objectMapper.convertValue(