NIFI-12102 Fixed deprecation warnings when instantiating primitive types

This closes #7769

Signed-off-by: David Handermann <exceptionfactory@apache.org>
This commit is contained in:
Pierre Villard 2023-09-21 16:34:40 +02:00 committed by exceptionfactory
parent badbc1aca6
commit 5405a3b08b
No known key found for this signature in database
GPG Key ID: 29B6A52D2AAE8DBA
10 changed files with 60 additions and 60 deletions

View File

@@ -519,7 +519,7 @@ public class TestDataTypeUtils {
@Test
public void testIsCompatibleDataTypeInteger() {
final DataType dataType = RecordFieldType.INT.getDataType();
assertTrue(DataTypeUtils.isCompatibleDataType(new Integer("1234567"), dataType));
assertTrue(DataTypeUtils.isCompatibleDataType(Integer.valueOf("1234567"), dataType));
assertTrue(DataTypeUtils.isCompatibleDataType("1234567", dataType));
assertFalse(DataTypeUtils.isCompatibleDataType(new BigInteger("12345678901234567890"), dataType));
assertFalse(DataTypeUtils.isCompatibleDataType(1234567890123456789L, dataType));

View File

@@ -290,7 +290,7 @@ public class PBKDF2SecureHasherTest {
List<Integer> validHLengths = Arrays.asList(32, 64);
// MAX_VALUE + 1 will become MIN_VALUE because of signed integer math
List<Integer> invalidDKLengths = Arrays.asList(-1, 0, Integer.MAX_VALUE + 1, new Integer(Integer.MAX_VALUE * 2 - 1));
List<Integer> invalidDKLengths = Arrays.asList(-1, 0, Integer.MAX_VALUE + 1, Integer.valueOf(Integer.MAX_VALUE * 2 - 1));
// Act and Assert
validHLengths.forEach(hLen -> {

View File

@@ -35,12 +35,12 @@ public class MetricsService {
//processor - specific metrics
public Map<String, Double> getProcessorMetrics(ProcessorStatus status) {
final Map<String, Double> metrics = new HashMap<>();
metrics.put(MetricNames.FLOW_FILES_RECEIVED, new Double(status.getInputCount()));
metrics.put(MetricNames.FLOW_FILES_SENT, new Double(status.getOutputCount()));
metrics.put(MetricNames.BYTES_READ, new Double(status.getInputBytes()));
metrics.put(MetricNames.BYTES_WRITTEN, new Double(status.getOutputBytes()));
metrics.put(MetricNames.ACTIVE_THREADS, new Double(status.getActiveThreadCount()));
metrics.put(MetricNames.TOTAL_TASK_DURATION, new Double(status.getProcessingNanos()));
metrics.put(MetricNames.FLOW_FILES_RECEIVED, Double.valueOf(status.getInputCount()));
metrics.put(MetricNames.FLOW_FILES_SENT, Double.valueOf(status.getOutputCount()));
metrics.put(MetricNames.BYTES_READ, Double.valueOf(status.getInputBytes()));
metrics.put(MetricNames.BYTES_WRITTEN, Double.valueOf(status.getOutputBytes()));
metrics.put(MetricNames.ACTIVE_THREADS, Double.valueOf(status.getActiveThreadCount()));
metrics.put(MetricNames.TOTAL_TASK_DURATION, Double.valueOf(status.getProcessingNanos()));
return metrics;
}
@@ -52,26 +52,26 @@ public class MetricsService {
public Map<String, Double> getPortStatusMetrics(PortStatus status){
final Map<String, Double> metrics = new HashMap<>();
metrics.put(MetricNames.ACTIVE_THREADS, new Double(status.getActiveThreadCount()));
metrics.put(MetricNames.INPUT_COUNT, new Double(status.getInputCount()));
metrics.put(MetricNames.OUTPUT_COUNT, new Double(status.getOutputCount()));
metrics.put(MetricNames.INPUT_BYTES, new Double(status.getInputBytes()));
metrics.put(MetricNames.OUTPUT_BYTES, new Double(status.getOutputBytes()));
metrics.put(MetricNames.FLOW_FILES_RECEIVED, new Double(status.getFlowFilesReceived()));
metrics.put(MetricNames.FLOW_FILES_SENT, new Double(status.getFlowFilesSent()));
metrics.put(MetricNames.BYTES_RECEIVED, new Double(status.getBytesReceived()));
metrics.put(MetricNames.BYTES_SENT, new Double(status.getBytesSent()));
metrics.put(MetricNames.ACTIVE_THREADS, Double.valueOf(status.getActiveThreadCount()));
metrics.put(MetricNames.INPUT_COUNT, Double.valueOf(status.getInputCount()));
metrics.put(MetricNames.OUTPUT_COUNT, Double.valueOf(status.getOutputCount()));
metrics.put(MetricNames.INPUT_BYTES, Double.valueOf(status.getInputBytes()));
metrics.put(MetricNames.OUTPUT_BYTES, Double.valueOf(status.getOutputBytes()));
metrics.put(MetricNames.FLOW_FILES_RECEIVED, Double.valueOf(status.getFlowFilesReceived()));
metrics.put(MetricNames.FLOW_FILES_SENT, Double.valueOf(status.getFlowFilesSent()));
metrics.put(MetricNames.BYTES_RECEIVED, Double.valueOf(status.getBytesReceived()));
metrics.put(MetricNames.BYTES_SENT, Double.valueOf(status.getBytesSent()));
return metrics;
}
public Map<String, Double> getConnectionStatusMetrics(ConnectionStatus status) {
final Map<String, Double> metrics = new HashMap<>();
metrics.put(MetricNames.INPUT_COUNT, new Double(status.getInputCount()));
metrics.put(MetricNames.INPUT_BYTES, new Double(status.getInputBytes()));
metrics.put(MetricNames.QUEUED_COUNT, new Double(status.getQueuedCount()));
metrics.put(MetricNames.QUEUED_BYTES, new Double(status.getQueuedBytes()));
metrics.put(MetricNames.OUTPUT_COUNT, new Double(status.getOutputCount()));
metrics.put(MetricNames.OUTPUT_BYTES, new Double(status.getOutputBytes()));
metrics.put(MetricNames.INPUT_COUNT, Double.valueOf(status.getInputCount()));
metrics.put(MetricNames.INPUT_BYTES, Double.valueOf(status.getInputBytes()));
metrics.put(MetricNames.QUEUED_COUNT, Double.valueOf(status.getQueuedCount()));
metrics.put(MetricNames.QUEUED_BYTES, Double.valueOf(status.getQueuedBytes()));
metrics.put(MetricNames.OUTPUT_COUNT, Double.valueOf(status.getOutputCount()));
metrics.put(MetricNames.OUTPUT_BYTES, Double.valueOf(status.getOutputBytes()));
return metrics;
}
@@ -79,16 +79,16 @@ public class MetricsService {
//general metrics for whole dataflow
public Map<String, Double> getDataFlowMetrics(ProcessGroupStatus status) {
final Map<String, Double> metrics = new HashMap<>();
metrics.put(MetricNames.FLOW_FILES_RECEIVED, new Double(status.getFlowFilesReceived()));
metrics.put(MetricNames.BYTES_RECEIVED, new Double(status.getBytesReceived()));
metrics.put(MetricNames.FLOW_FILES_SENT, new Double(status.getFlowFilesSent()));
metrics.put(MetricNames.BYTES_SENT, new Double(status.getBytesSent()));
metrics.put(MetricNames.FLOW_FILES_QUEUED, new Double(status.getQueuedCount()));
metrics.put(MetricNames.BYTES_QUEUED, new Double(status.getQueuedContentSize()));
metrics.put(MetricNames.BYTES_READ, new Double(status.getBytesRead()));
metrics.put(MetricNames.BYTES_WRITTEN, new Double(status.getBytesWritten()));
metrics.put(MetricNames.ACTIVE_THREADS, new Double(status.getActiveThreadCount()));
metrics.put(MetricNames.TOTAL_TASK_DURATION, new Double(calculateProcessingNanos(status)));
metrics.put(MetricNames.FLOW_FILES_RECEIVED, Double.valueOf(status.getFlowFilesReceived()));
metrics.put(MetricNames.BYTES_RECEIVED, Double.valueOf(status.getBytesReceived()));
metrics.put(MetricNames.FLOW_FILES_SENT, Double.valueOf(status.getFlowFilesSent()));
metrics.put(MetricNames.BYTES_SENT, Double.valueOf(status.getBytesSent()));
metrics.put(MetricNames.FLOW_FILES_QUEUED, Double.valueOf(status.getQueuedCount()));
metrics.put(MetricNames.BYTES_QUEUED, Double.valueOf(status.getQueuedContentSize()));
metrics.put(MetricNames.BYTES_READ, Double.valueOf(status.getBytesRead()));
metrics.put(MetricNames.BYTES_WRITTEN, Double.valueOf(status.getBytesWritten()));
metrics.put(MetricNames.ACTIVE_THREADS, Double.valueOf(status.getActiveThreadCount()));
metrics.put(MetricNames.TOTAL_TASK_DURATION, Double.valueOf(calculateProcessingNanos(status)));
status.getOutputPortStatus();
return metrics;
}
@@ -96,28 +96,28 @@ public class MetricsService {
//virtual machine metrics
public Map<String, Double> getJVMMetrics(JmxJvmMetrics virtualMachineMetrics) {
final Map<String, Double> metrics = new HashMap<>();
metrics.put(MetricNames.JVM_UPTIME, new Double(virtualMachineMetrics.uptime()));
metrics.put(MetricNames.JVM_HEAP_USED, new Double(virtualMachineMetrics.heapUsed(DataUnit.B)));
metrics.put(MetricNames.JVM_HEAP_USAGE, new Double(virtualMachineMetrics.heapUsage()));
metrics.put(MetricNames.JVM_NON_HEAP_USAGE, new Double(virtualMachineMetrics.nonHeapUsage()));
metrics.put(MetricNames.JVM_THREAD_COUNT, new Double(virtualMachineMetrics.threadCount()));
metrics.put(MetricNames.JVM_DAEMON_THREAD_COUNT, new Double(virtualMachineMetrics.daemonThreadCount()));
metrics.put(MetricNames.JVM_FILE_DESCRIPTOR_USAGE, new Double(virtualMachineMetrics.fileDescriptorUsage()));
metrics.put(MetricNames.JVM_UPTIME, Double.valueOf(virtualMachineMetrics.uptime()));
metrics.put(MetricNames.JVM_HEAP_USED, Double.valueOf(virtualMachineMetrics.heapUsed(DataUnit.B)));
metrics.put(MetricNames.JVM_HEAP_USAGE, Double.valueOf(virtualMachineMetrics.heapUsage()));
metrics.put(MetricNames.JVM_NON_HEAP_USAGE, Double.valueOf(virtualMachineMetrics.nonHeapUsage()));
metrics.put(MetricNames.JVM_THREAD_COUNT, Double.valueOf(virtualMachineMetrics.threadCount()));
metrics.put(MetricNames.JVM_DAEMON_THREAD_COUNT, Double.valueOf(virtualMachineMetrics.daemonThreadCount()));
metrics.put(MetricNames.JVM_FILE_DESCRIPTOR_USAGE, Double.valueOf(virtualMachineMetrics.fileDescriptorUsage()));
for (Map.Entry<Thread.State, Double> entry : virtualMachineMetrics.threadStatePercentages().entrySet()) {
final int normalizedValue = (int) (100 * (entry.getValue() == null ? 0 : entry.getValue()));
switch (entry.getKey()) {
case BLOCKED:
metrics.put(MetricNames.JVM_THREAD_STATES_BLOCKED, new Double(normalizedValue));
metrics.put(MetricNames.JVM_THREAD_STATES_BLOCKED, Double.valueOf(normalizedValue));
break;
case RUNNABLE:
metrics.put(MetricNames.JVM_THREAD_STATES_RUNNABLE, new Double(normalizedValue));
metrics.put(MetricNames.JVM_THREAD_STATES_RUNNABLE, Double.valueOf(normalizedValue));
break;
case TERMINATED:
metrics.put(MetricNames.JVM_THREAD_STATES_TERMINATED, new Double(normalizedValue));
metrics.put(MetricNames.JVM_THREAD_STATES_TERMINATED, Double.valueOf(normalizedValue));
break;
case TIMED_WAITING:
metrics.put(MetricNames.JVM_THREAD_STATES_TIMED_WAITING, new Double(normalizedValue));
metrics.put(MetricNames.JVM_THREAD_STATES_TIMED_WAITING, Double.valueOf(normalizedValue));
break;
default:
break;
@@ -128,8 +128,8 @@ public class MetricsService {
final String gcName = entry.getKey().replace(" ", "");
final long runs = entry.getValue().getRuns();
final long timeMS = entry.getValue().getTime(TimeUnit.MILLISECONDS);
metrics.put(MetricNames.JVM_GC_RUNS + "." + gcName,new Double(runs));
metrics.put(MetricNames.JVM_GC_TIME + "." + gcName, new Double(timeMS));
metrics.put(MetricNames.JVM_GC_RUNS + "." + gcName, Double.valueOf(runs));
metrics.put(MetricNames.JVM_GC_TIME + "." + gcName, Double.valueOf(timeMS));
}
return metrics;

View File

@@ -1121,7 +1121,7 @@ public class TestFlowController {
// create a processor dto
final ProcessorDTO processorDTO = new ProcessorDTO();
processorDTO.setId(UUID.randomUUID().toString()); // use a different id here
processorDTO.setPosition(new PositionDTO(new Double(0), new Double(0)));
processorDTO.setPosition(new PositionDTO(Double.valueOf(0), Double.valueOf(0)));
processorDTO.setStyle(processorNode.getStyle());
processorDTO.setParentGroupId("1234");
processorDTO.setInputRequirement(processorNode.getInputRequirement().name());
@@ -1180,7 +1180,7 @@ public class TestFlowController {
// create a processor dto
final ProcessorDTO processorDTO = new ProcessorDTO();
processorDTO.setId(UUID.randomUUID().toString()); // use a different id here
processorDTO.setPosition(new PositionDTO(new Double(0), new Double(0)));
processorDTO.setPosition(new PositionDTO(Double.valueOf(0), Double.valueOf(0)));
processorDTO.setStyle(processorNode.getStyle());
processorDTO.setParentGroupId("1234");
processorDTO.setInputRequirement(processorNode.getInputRequirement().name());

View File

@@ -136,7 +136,7 @@ public class TestPutHBaseRecord {
String nameVal = Bytes.toString(name.getBuffer());
String codeVal = Bytes.toString(code.getBuffer());
assertTrue(NAMES.contains(nameVal), "Name was not found");
assertTrue(CODES.contains(new Long(codeVal)), "Code was not found ");
assertTrue(CODES.contains(Long.valueOf(codeVal)), "Code was not found ");
}
@Test

View File

@@ -178,7 +178,7 @@ public class ModifyHTMLElement extends AbstractHTMLProcessor {
out.write(doc.html().getBytes(StandardCharsets.UTF_8));
}
});
ff = session.putAttribute(ff, NUM_ELEMENTS_MODIFIED_ATTR, new Integer(eles.size()).toString());
ff = session.putAttribute(ff, NUM_ELEMENTS_MODIFIED_ATTR, Integer.valueOf(eles.size()).toString());
session.transfer(ff, REL_SUCCESS);
// Transfer the original HTML

View File

@@ -202,7 +202,7 @@ public class MongoDBLookupServiceIT extends AbstractMongoIT {
.append("decimalField", Double.MAX_VALUE / 2.0)
.append("subrecordField", new Document()
.append("nestedString", "test")
.append("nestedLong", new Long(1000)))
.append("nestedLong", Long.valueOf(1000)))
.append("arrayField", list)
);
@@ -217,7 +217,7 @@ public class MongoDBLookupServiceIT extends AbstractMongoIT {
assertEquals("Hello, world", record.getValue("stringField"), "The value was wrong.");
assertEquals("x-y-z", record.getValue("uuid"), "The value was wrong.");
assertEquals(new Long(10000), record.getValue("longField"));
assertEquals(Long.valueOf(10000), record.getValue("longField"));
assertEquals((Double.MAX_VALUE / 2.0), record.getValue("decimalField"));
assertEquals(d, record.getValue("dateField"));
assertEquals(ts.getTime(), ((Date)record.getValue("timestampField")).getTime());
@@ -225,7 +225,7 @@ public class MongoDBLookupServiceIT extends AbstractMongoIT {
Record subRecord = record.getAsRecord("subrecordField", subSchema);
assertNotNull(subRecord);
assertEquals("test", subRecord.getValue("nestedString"));
assertEquals(new Long(1000), subRecord.getValue("nestedLong"));
assertEquals(Long.valueOf(1000), subRecord.getValue("nestedLong"));
assertEquals(list, record.getValue("arrayField"));
Map<String, Object> clean = new HashMap<>();

View File

@@ -29,8 +29,8 @@ import java.util.TreeMap;
*/
public class RedisStateMap implements StateMap {
public static final Long DEFAULT_VERSION = new Long(-1);
public static final Integer DEFAULT_ENCODING = new Integer(1);
public static final Long DEFAULT_VERSION = Long.valueOf(-1);
public static final Integer DEFAULT_ENCODING = Integer.valueOf(1);
private final Long version;
private final Integer encodingVersion;

View File

@@ -197,9 +197,9 @@ public class TestDistributedMapServerAndClient {
// Client 1 and 2 fetch the key
AtomicCacheEntry<String, String, Long> c1 = client1.fetch(key, stringSerializer, stringDeserializer);
AtomicCacheEntry<String, String, Long> c2 = client2.fetch(key, stringSerializer, stringDeserializer);
assertEquals(new Long(0), c1.getRevision().orElse(0L));
assertEquals(Long.valueOf(0), c1.getRevision().orElse(0L));
assertEquals("valueC1-0", c1.getValue());
assertEquals(new Long(0), c2.getRevision().orElse(0L));
assertEquals(Long.valueOf(0), c2.getRevision().orElse(0L));
assertEquals("valueC1-0", c2.getValue());
// Client 1 replace
@@ -214,7 +214,7 @@ public class TestDistributedMapServerAndClient {
// Client 2 fetch the key again
c2 = client2.fetch(key, stringSerializer, stringDeserializer);
assertEquals("valueC1-1", c2.getValue());
assertEquals(new Long(1), c2.getRevision().orElse(0L));
assertEquals(Long.valueOf(1), c2.getRevision().orElse(0L));
// Now, Client 2 knows the correct revision so it can replace the key
c2.setValue("valueC2-2");
@@ -224,7 +224,7 @@ public class TestDistributedMapServerAndClient {
// Assert the cache
c2 = client2.fetch(key, stringSerializer, stringDeserializer);
assertEquals("valueC2-2", c2.getValue());
assertEquals(new Long(2), c2.getRevision().orElse(0L));
assertEquals(Long.valueOf(2), c2.getRevision().orElse(0L));
} finally {
client1.close();
client2.close();

View File

@@ -81,7 +81,7 @@ public abstract class AbstractNiFiCommand<R extends Result> extends AbstractProp
final String clientId = getContext().getSession().getNiFiClientID();
final RevisionDTO revisionDTO = new RevisionDTO();
revisionDTO.setVersion(new Long(0));
revisionDTO.setVersion(Long.valueOf(0));
revisionDTO.setClientId(clientId);
return revisionDTO;
}