mirror of https://github.com/apache/nifi.git
NIFI-9638 Refactored Google Guava references
- Refactored nifi-framework and nifi-standard modules
- Replaced Google Cache with Caffeine Cache
- Replaced Google collections classes with standard Java collections

This closes #5730.

Signed-off-by: Kevin Doran <kdoran@apache.org>
parent dc7d9510cd
commit 43748a5523
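The changes below repeat a small number of mechanical substitutions. For the cache migration, Guava's CacheBuilder and Caffeine's builder share the same fluent shape for the features used here (expire-after-write with a TimeUnit). A minimal sketch of the two equivalent constructions, using fully qualified names so both can appear side by side (the class and method names in the sketch are illustrative, not from the diff):

    import java.util.concurrent.TimeUnit;

    class CacheMigrationSketch {
        // Before: Guava's builder from com.google.common.cache
        static com.google.common.cache.Cache<String, String> guava() {
            return com.google.common.cache.CacheBuilder.newBuilder()
                    .expireAfterWrite(1, TimeUnit.MINUTES)
                    .build();
        }

        // After: Caffeine's builder from com.github.benmanes.caffeine.cache,
        // a drop-in replacement for this usage
        static com.github.benmanes.caffeine.cache.Cache<String, String> caffeine() {
            return com.github.benmanes.caffeine.cache.Caffeine.newBuilder()
                    .expireAfterWrite(1, TimeUnit.MINUTES)
                    .build();
        }
    }

The behavioral differences that do matter (size() vs estimatedSize(), checked vs unchecked compute methods) show up in the individual files below.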
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.cluster.coordination.http.endpoints;
 
-import com.google.common.collect.Sets;
 import org.apache.nifi.cluster.manager.NodeResponse;
 import org.apache.nifi.cluster.manager.PortEntityMerger;
 import org.apache.nifi.cluster.protocol.NodeIdentifier;
@@ -27,6 +26,7 @@ import org.apache.nifi.web.api.entity.ControllerEntity;
 import java.net.URI;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
@@ -88,8 +88,8 @@ public class ControllerEndpointMerger extends AbstractSingleDTOEndpoint<Controll
         }
 
         // get intersection of input and output ports
-        final Set<PortDTO> clientInputPorts = Sets.newHashSet(clientDto.getInputPorts());
-        final Set<PortDTO> clientOutputPorts = Sets.newHashSet(clientDto.getOutputPorts());
+        final Set<PortDTO> clientInputPorts = new HashSet<>(clientDto.getInputPorts());
+        final Set<PortDTO> clientOutputPorts = new HashSet<>(clientDto.getOutputPorts());
         dtoMap.values().forEach(controller -> {
             clientInputPorts.retainAll(controller.getInputPorts());
             clientOutputPorts.retainAll(controller.getOutputPorts());

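The collections changes follow the same substitution throughout, since Guava's Sets and Lists factories are thin wrappers over JDK constructors. A hedged summary of the equivalences this commit applies (variable names are illustrative):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    class CollectionsMigrationSketch {
        static void equivalences() {
            // Sets.newHashSet(collection)   ->  new HashSet<>(collection)
            Set<String> ports = new HashSet<>(Arrays.asList("in", "out"));

            // Lists.newArrayList()          ->  new ArrayList<>()
            List<String> entities = new ArrayList<>();

            // Lists.newArrayList(a, b, ...) ->  Arrays.asList(a, b, ...) if fixed-size,
            //                               or  new ArrayList<>(Arrays.asList(...)) if mutable
            List<String> nodes = new ArrayList<>(Arrays.asList("node1", "node2"));
        }
    }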
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.cluster.manager;
 
-import com.google.common.collect.Lists;
 import org.apache.nifi.cluster.protocol.NodeIdentifier;
 import org.apache.nifi.web.api.entity.BulletinEntity;
 
@@ -75,7 +74,7 @@ public final class BulletinMerger {
             }
         }
 
-        final List<BulletinEntity> entities = Lists.newArrayList();
+        final List<BulletinEntity> entities = new ArrayList<>();
 
         // group by message when permissions allow
         final Map<String,List<BulletinEntity>> groupingEntities = bulletinEntities.stream()

@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.components.monitor;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.nifi.controller.ActiveThreadInfo;
 import org.apache.nifi.controller.ProcessorNode;
 import org.apache.nifi.controller.ThreadDetails;
@@ -79,12 +78,10 @@ public class LongRunningTaskMonitor implements Runnable {
         getLogger().info("Active threads: {}; Long running threads: {}; time to check: {} nanos", activeThreadCount, longRunningThreadCount, NumberFormat.getInstance().format(nanos));
     }
 
-    @VisibleForTesting
     protected Logger getLogger() {
         return LOGGER;
     }
 
-    @VisibleForTesting
     protected ThreadDetails captureThreadDetails() {
         return ThreadDetails.capture();
     }

@@ -16,8 +16,6 @@
  */
 package org.apache.nifi.controller;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
@@ -29,13 +27,16 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import org.apache.nifi.persistence.StandardSnippetDeserializer;
 import org.apache.nifi.persistence.StandardSnippetSerializer;
 import org.apache.nifi.stream.io.StreamUtils;
 
 public class SnippetManager {
 
-    private final Cache<String, StandardSnippet> snippetMap = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
+    private final Cache<String, StandardSnippet> snippetMap = Caffeine.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
 
     public synchronized void addSnippet(final StandardSnippet snippet) {
         if (snippetMap.getIfPresent(snippet.getId()) != null) {

@@ -17,6 +17,8 @@
 
 package org.apache.nifi.controller.leader.election;
 
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.lang3.StringUtils;
@@ -27,8 +29,6 @@ import org.apache.zookeeper.ZooDefs;
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Id;
 
-import com.google.common.collect.Lists;
-
 public class CuratorACLProviderFactory {
 
     public static final String SASL_AUTH_SCHEME = "sasl";
@@ -61,7 +61,7 @@ public class CuratorACLProviderFactory {
                 principal.append(realm);
             }
 
-            this.acls = Lists.newArrayList(new ACL(ZooDefs.Perms.ALL, new Id(SASL_AUTH_SCHEME, principal.toString())));
+            this.acls = new ArrayList<>(Arrays.asList(new ACL(ZooDefs.Perms.ALL, new Id(SASL_AUTH_SCHEME, principal.toString()))));
             this.acls.addAll(ZooDefs.Ids.READ_ACL_UNSAFE);

        }else{

@@ -17,7 +17,6 @@
 
 package org.apache.nifi.controller.queue.clustered.partition;
 
-import com.google.common.hash.Hashing;
 import org.apache.nifi.controller.repository.FlowFileRecord;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.slf4j.Logger;
@@ -25,6 +24,7 @@ import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Random;
 
 public class CorrelationAttributePartitioner implements FlowFilePartitioner {
     private static final Logger logger = LoggerFactory.getLogger(CorrelationAttributePartitioner.class);
@@ -39,15 +39,7 @@ public class CorrelationAttributePartitioner implements FlowFilePartitioner {
     public QueuePartition getPartition(final FlowFileRecord flowFile, final QueuePartition[] partitions, final QueuePartition localPartition) {
         final int hash = hash(flowFile);
 
-        // The consistentHash method appears to always return a bucket of '1' if there are 2 possible buckets,
-        // so in this case we will just use modulo division to avoid this. I suspect this is a bug with the Guava
-        // implementation, but it's not clear at this point.
-        final int index;
-        if (partitions.length < 3) {
-            index = Math.floorMod(hash, partitions.length);
-        } else {
-            index = Hashing.consistentHash(hash, partitions.length);
-        }
+        final int index = findIndex(hash, partitions.length);
 
         if (logger.isDebugEnabled()) {
             final List<String> partitionDescriptions = new ArrayList<>(partitions.length);
@@ -75,4 +67,13 @@ public class CorrelationAttributePartitioner implements FlowFilePartitioner {
     public boolean isRebalanceOnFailure() {
         return false;
     }
+
+    private int findIndex(final long hash, final int partitions) {
+        final Random random = new Random(hash);
+        int index = random.nextInt();
+        while (index < 0 && index >= partitions) {
+            index = random.nextInt();
+        }
+        return index;
+    }
 }

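The removed code relied on Guava's Hashing.consistentHash, which minimizes bucket reshuffling when the partition count changes; the replacement draws from a Random seeded with the hash. Note that the guard in the new findIndex uses &&, a condition no int can satisfy, so the loop body never runs and the first draw is returned as-is. A simpler deterministic way to get an in-range bucket, and the one the removed branch already used for small partition counts, is Math.floorMod (a hedged sketch, not the committed method):

    class BucketSketch {
        // floorMod keeps the result in [0, partitions) even for negative hashes,
        // at the cost of reshuffling most keys when the partition count changes.
        static int bucket(final int hash, final int partitions) {
            return Math.floorMod(hash, partitions);
        }

        public static void main(String[] args) {
            System.out.println(bucket(-7, 4)); // 1, never negative
        }
    }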
@@ -37,7 +37,6 @@ import org.apache.nifi.util.Tuple;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.primitives.Doubles;
 /**
  * <p>
  * An implementation of {@link StatusAnalytics} that is provides Connection related analysis/prediction for a given connection instance
@@ -378,7 +377,7 @@ public class ConnectionStatusAnalytics implements StatusAnalytics {
 
         Double score = getScore(model);
 
-        if (score == null || (Doubles.isFinite(score) && !Double.isNaN(score) && score < scoreThreshold)) {
+        if (score == null || (score < scoreThreshold)) {
             if (supportOnlineLearning && model.supportsOnlineLearning()) {
                 model.clear();
             }

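Guava's Doubles.isFinite has a direct JDK replacement in Double.isFinite (Java 8+), though this hunk drops the guard entirely rather than swapping it. A short sketch of what changes (the threshold value is illustrative):

    class FiniteCheckSketch {
        public static void main(String[] args) {
            // Guava: Doubles.isFinite(score)  ->  JDK: Double.isFinite(score)
            System.out.println(Double.isFinite(0.42)); // true

            // NaN fails any < comparison on its own, so the NaN guard was already
            // redundant; dropping the finiteness guard only changes behavior for
            // infinite scores.
            System.out.println(Double.NaN < 0.9);               // false
            System.out.println(Double.NEGATIVE_INFINITY < 0.9); // true
        }
    }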
@@ -16,8 +16,6 @@
  */
 package org.apache.nifi.controller.status.history;
 
-import com.google.common.base.Preconditions;
-
 import java.time.LocalDateTime;
 import java.time.LocalTime;
 import java.time.ZoneId;
@@ -29,7 +27,9 @@ public class JsonNodeStatusHistoryDumpFactory implements StatusHistoryDumpFactor
 
     @Override
     public StatusHistoryDump create(int days) {
-        Preconditions.checkArgument(days > 0, String.format("The number of days shall be greater than 0. The current value is %s.", days));
+        if (days <= 0) {
+            throw new IllegalArgumentException(String.format("The number of days shall be greater than 0. The current value is %s.", days));
+        }
         final LocalDateTime endOfToday = LocalDateTime.now().with(LocalTime.MAX);
         final LocalDateTime startOfDaysBefore = endOfToday.minusDays(days).with(LocalTime.MIN);
 

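Preconditions.checkArgument has no single-call JDK equivalent, so the diff inlines the if/throw. If many call sites needed the same check, a one-line helper would restore the old shape (the class and method names here are hypothetical):

    final class Checks {
        private Checks() {
        }

        // Minimal stand-in for Guava's Preconditions.checkArgument(boolean, Object)
        static void checkArgument(final boolean condition, final String message) {
            if (!condition) {
                throw new IllegalArgumentException(message);
            }
        }
    }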
@@ -17,9 +17,6 @@
 
 package org.apache.nifi.registry.flow.mapping;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.nifi.authorization.resource.ComponentAuthorizable;
 import org.apache.nifi.bundle.BundleCoordinate;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -84,7 +81,10 @@ import org.mockito.junit.MockitoJUnitRunner;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -128,10 +128,10 @@ public class NiFiRegistryFlowMapperTest {
             prepareProcessGroupWithParameterContext(Collections.emptyList(),
                 true, true);
         final ProcessGroup innerProcessGroup =
-            prepareProcessGroupWithParameterContext(Lists.newArrayList(innerInnerProcessGroup),
+            prepareProcessGroupWithParameterContext(Collections.singletonList(innerInnerProcessGroup),
                 true, false);
         final ProcessGroup processGroup =
-            prepareProcessGroupWithParameterContext(Lists.newArrayList(innerProcessGroup),
+            prepareProcessGroupWithParameterContext(Collections.singletonList(innerProcessGroup),
                 false, false);
 
         // first nesting should be traversed because child is not version controlled, but deeper nesting should be ignored
@@ -157,10 +157,10 @@ public class NiFiRegistryFlowMapperTest {
             prepareProcessGroupWithParameterContext(Collections.emptyList(),
                 true, true);
         final ProcessGroup innerProcessGroup =
-            prepareProcessGroupWithParameterContext(Lists.newArrayList(innerInnerProcessGroup),
+            prepareProcessGroupWithParameterContext(Collections.singletonList(innerInnerProcessGroup),
                 false, true);
         final ProcessGroup processGroup =
-            prepareProcessGroupWithParameterContext(Lists.newArrayList(innerProcessGroup),
+            prepareProcessGroupWithParameterContext(Collections.singletonList(innerProcessGroup),
                 true, true);
 
         // include nested parameter contexts even though they are version controlled because map descendant indicator is true
@@ -190,9 +190,9 @@ public class NiFiRegistryFlowMapperTest {
         final ProcessGroup processGroup =
             prepareProcessGroup(1,false,false, false,
                 false, false, null,
-                false, true, Lists.newArrayList(innerProcessGroup));
+                false, true, Collections.singletonList(innerProcessGroup));
 
-        final List<ProcessGroup> allProcessGroups = Lists.newArrayList(innerProcessGroup);
+        final List<ProcessGroup> allProcessGroups = Collections.singletonList(innerProcessGroup);
         when(processGroup.findAllProcessGroups()).thenReturn(allProcessGroups);
 
         // perform the mapping, excluding descendant versioned flows
@@ -227,13 +227,13 @@ public class NiFiRegistryFlowMapperTest {
         final ProcessGroup innerProcessGroup =
             prepareProcessGroup(1,true, false, false,
                 true, true, externalControllerServiceNode,
-                true, true, Lists.newArrayList(innerInnerProcessGroup));
+                true, true, Collections.singletonList(innerInnerProcessGroup));
         final ProcessGroup processGroup =
             prepareProcessGroup(2,false,false, true,
                 false, true, null,
-                false, true, Lists.newArrayList(innerProcessGroup));
+                false, true, Collections.singletonList(innerProcessGroup));
 
-        final List<ProcessGroup> allProcessGroups = Lists.newArrayList(innerProcessGroup, innerInnerProcessGroup);
+        final List<ProcessGroup> allProcessGroups = Arrays.asList(innerProcessGroup, innerInnerProcessGroup);
         when(processGroup.findAllProcessGroups()).thenReturn(allProcessGroups);
 
         // perform the mapping
@@ -263,9 +263,9 @@ public class NiFiRegistryFlowMapperTest {
         final ParameterContext parameterContext = mock(ParameterContext.class);
         when(processGroup.getParameterContext()).thenReturn(parameterContext);
         when(parameterContext.getName()).thenReturn("context" + (counter++));
-        final Map<ParameterDescriptor, Parameter> parametersMap = Maps.newHashMap();
+        final Map<ParameterDescriptor, Parameter> parametersMap = new LinkedHashMap<>();
         when(parameterContext.getParameters()).thenReturn(parametersMap);
-        when(parameterContext.getInheritedParameterContextNames()).thenReturn(Arrays.asList("other-context"));
+        when(parameterContext.getInheritedParameterContextNames()).thenReturn(Collections.singletonList("other-context"));
 
         addParameter(parametersMap, "value" + (counter++), false);
         addParameter(parametersMap, "value" + (counter++), true);
@@ -276,7 +276,7 @@ public class NiFiRegistryFlowMapperTest {
             when(processGroup.getVersionControlInformation()).thenReturn(mock(VersionControlInformation.class));
         }
 
-        when(processGroup.getProcessGroups()).thenReturn(Sets.newLinkedHashSet(childProcessGroups));
+        when(processGroup.getProcessGroups()).thenReturn(new HashSet<>(childProcessGroups));
 
         return processGroup;
     }
@@ -348,15 +348,15 @@ public class NiFiRegistryFlowMapperTest {
         when(processGroup.getFlowFileOutboundPolicy()).thenReturn(FlowFileOutboundPolicy.STREAM_WHEN_AVAILABLE);
 
         // prep funnels
-        final Set<Funnel> funnels = Sets.newHashSet();
+        final Set<Funnel> funnels = new LinkedHashSet<>();
         if (includeFunnel) {
             funnels.add(prepareFunnel(processGroupId));
         }
         when(processGroup.getFunnels()).thenReturn(funnels);
 
         // prep ports
-        final Set<Port> inputPorts = Sets.newHashSet();
-        final Set<Port> outputPorts = Sets.newHashSet();
+        final Set<Port> inputPorts = new LinkedHashSet<>();
+        final Set<Port> outputPorts = new LinkedHashSet<>();
         if (includePorts) {
             inputPorts.add(preparePort(processGroupId, PortType.INPUT_PORT));
             outputPorts.add(preparePort(processGroupId, PortType.OUTPUT_PORT));
@@ -365,15 +365,15 @@ public class NiFiRegistryFlowMapperTest {
         when(processGroup.getOutputPorts()).thenReturn(outputPorts);
 
         // prep labels
-        final Set<Label> labels = Sets.newHashSet();
+        final Set<Label> labels = new LinkedHashSet<>();
         if (includeLabels) {
             labels.add(prepareLabel(processGroupId));
         }
         when(processGroup.getLabels()).thenReturn(labels);
 
         // prep connections and processors
-        final Set<ProcessorNode> processorNodes = Sets.newLinkedHashSet();
-        final Set<Connection> connections = Sets.newHashSet();
+        final Set<ProcessorNode> processorNodes = new LinkedHashSet<>();
+        final Set<Connection> connections = new LinkedHashSet<>();
         if (numProcessors == 2) {
             // 2 processors connected together
             final ProcessorNode processorNode1 = prepareProcessor(processGroup, externalControllerServiceNode);
@@ -391,7 +391,7 @@ public class NiFiRegistryFlowMapperTest {
         when(processGroup.getConnections()).thenReturn(connections);
 
         // prep controller services
-        final Set<ControllerServiceNode> controllerServiceNodes = Sets.newHashSet();
+        final Set<ControllerServiceNode> controllerServiceNodes = new LinkedHashSet<>();
         if (includeControllerService) {
             controllerServiceNodes.add(prepareControllerService(processGroupId));
         }
@@ -400,14 +400,14 @@ public class NiFiRegistryFlowMapperTest {
         // prep variable registry
         final ComponentVariableRegistry componentVariableRegistry = mock(ComponentVariableRegistry.class);
         when(processGroup.getVariableRegistry()).thenReturn(componentVariableRegistry);
-        final Map<VariableDescriptor, String> registryVariableMap = Maps.newHashMap();
+        final Map<VariableDescriptor, String> registryVariableMap = new LinkedHashMap<>();
         if (includeVariableRegistry) {
             registryVariableMap.putAll(prepareVariableRegistry());
         }
         when(componentVariableRegistry.getVariableMap()).thenReturn(registryVariableMap);
 
         // prepare remote process group
-        final Set<RemoteProcessGroup> remoteProcessGroups = Sets.newHashSet();
+        final Set<RemoteProcessGroup> remoteProcessGroups = new LinkedHashSet<>();
         if (includeRemoteProcessGroup) {
             remoteProcessGroups.add(prepareRemoteProcessGroup(processGroupId));
         }
@@ -420,7 +420,7 @@ public class NiFiRegistryFlowMapperTest {
         }
 
         // prep nested process groups
-        when(processGroup.getProcessGroups()).thenReturn(Sets.newLinkedHashSet(childProcessGroups));
+        when(processGroup.getProcessGroups()).thenReturn(new LinkedHashSet<>(childProcessGroups));
 
         return processGroup;
     }
@@ -469,7 +469,7 @@ public class NiFiRegistryFlowMapperTest {
         }
         final PropertyDescriptor propertyDescriptor = propertyDescriptorBuilder.build();
         final PropertyConfiguration propertyConfiguration = mock(PropertyConfiguration.class);
-        final Map<PropertyDescriptor, PropertyConfiguration> properties = Maps.newHashMap();
+        final Map<PropertyDescriptor, PropertyConfiguration> properties = new LinkedHashMap<>();
         properties.put(propertyDescriptor, propertyConfiguration);
         when(processorNode.getProperties()).thenReturn(properties);
         when(processorNode.getProperty(propertyDescriptor)).thenReturn(propertyConfiguration);
@@ -483,8 +483,8 @@ public class NiFiRegistryFlowMapperTest {
         final Connection connection = mock(Connection.class);
         when(connection.getIdentifier()).thenReturn(UUID.randomUUID().toString());
         when(connection.getProcessGroup()).thenReturn(processGroup);
-        when(connection.getBendPoints()).thenReturn(Lists.newArrayList(new Position(counter++, counter++)));
-        when(connection.getRelationships()).thenReturn(Lists.newArrayList());
+        when(connection.getBendPoints()).thenReturn(Collections.singletonList(new Position(counter++, counter++)));
+        when(connection.getRelationships()).thenReturn(Collections.emptyList());
         final FlowFileQueue flowFileQueue = mock(FlowFileQueue.class);
         when(connection.getFlowFileQueue()).thenReturn(flowFileQueue);
         when(flowFileQueue.getPriorities()).thenReturn(Collections.emptyList());
@@ -500,7 +500,7 @@ public class NiFiRegistryFlowMapperTest {
     private Map<VariableDescriptor, String> prepareVariableRegistry() {
         final VariableDescriptor variableDescriptor =
                 new VariableDescriptor.Builder("variable"+(counter++)).build();
-        final Map<VariableDescriptor, String> variableRegistryMap = Maps.newHashMap();
+        final Map<VariableDescriptor, String> variableRegistryMap = new LinkedHashMap<>();
         variableRegistryMap.put(variableDescriptor, "value"+(counter++));
         return variableRegistryMap;
     }
@@ -522,9 +522,9 @@ public class NiFiRegistryFlowMapperTest {
         when(remoteProcessGroup.getName()).thenReturn("remote" + (counter++));
         when(remoteProcessGroup.getTransportProtocol()).thenReturn(SiteToSiteTransportProtocol.HTTP);
         final RemoteGroupPort remoteGroupInputPort = prepareRemoteGroupPort(remoteProcessGroup);
-        when(remoteProcessGroup.getInputPorts()).thenReturn(Sets.newHashSet(remoteGroupInputPort));
+        when(remoteProcessGroup.getInputPorts()).thenReturn(Collections.singleton(remoteGroupInputPort));
         final RemoteGroupPort remoteGroupOutputPort = prepareRemoteGroupPort(remoteProcessGroup);
-        when(remoteProcessGroup.getOutputPorts()).thenReturn(Sets.newHashSet(remoteGroupOutputPort));
+        when(remoteProcessGroup.getOutputPorts()).thenReturn(Collections.singleton(remoteGroupOutputPort));
         return remoteProcessGroup;
     }
 

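Note that the test code above generally swaps Guava's factories for LinkedHashSet and LinkedHashMap rather than plain HashSet/HashMap: the linked variants iterate in insertion order, which keeps mock-driven assertions deterministic. A small sketch of the difference:

    import java.util.HashSet;
    import java.util.LinkedHashSet;
    import java.util.Set;

    class IterationOrderSketch {
        public static void main(String[] args) {
            Set<String> hashed = new HashSet<>();
            Set<String> linked = new LinkedHashSet<>();
            for (String s : new String[] {"input", "output", "funnel"}) {
                hashed.add(s); // iteration order depends on hash buckets
                linked.add(s); // iteration order matches insertion order
            }
            System.out.println(linked); // [input, output, funnel]
            System.out.println(hashed); // order unspecified
        }
    }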
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.web.server;
 
-import com.google.common.base.Strings;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.net.InetAddress;
@@ -354,6 +353,6 @@ public class HostHeaderHandler extends ScopedHandler {
      * @return Not Defined status
      */
     static boolean isNotDefined(Map<String, String> networkInterfaces) {
-        return networkInterfaces == null || networkInterfaces.isEmpty() || networkInterfaces.values().stream().filter(value -> !Strings.isNullOrEmpty(value)).collect(Collectors.toList()).isEmpty();
+        return networkInterfaces == null || networkInterfaces.isEmpty() || networkInterfaces.values().stream().filter(value -> StringUtils.isNotBlank(value)).collect(Collectors.toList()).isEmpty();
     }
 }

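One subtlety in this hunk: !Strings.isNullOrEmpty(value) and StringUtils.isNotBlank(value) are not strictly equivalent. The Guava predicate accepts whitespace-only strings; the Commons Lang predicate rejects them, so the replacement slightly tightens the check, which is presumably the intent for interface names. A sketch of the difference:

    import org.apache.commons.lang3.StringUtils;

    class BlankPredicateSketch {
        public static void main(String[] args) {
            String ws = "   ";
            // JDK equivalent of Guava's !Strings.isNullOrEmpty(ws): true for whitespace
            System.out.println(ws != null && !ws.isEmpty()); // true
            // Commons Lang: a whitespace-only value is not "not blank"
            System.out.println(StringUtils.isNotBlank(ws));  // false
        }
    }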
@@ -16,8 +16,6 @@
  */
 package org.apache.nifi.web.server;
 
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.NiFiServer;
@@ -835,7 +833,7 @@ public class JettyServer implements NiFiServer, ExtensionUiLoader {
 
         logger.info("Configuring Jetty for " + connectorLabel + " on port: " + port);
 
-        final List<Connector> serverConnectors = Lists.newArrayList();
+        final List<Connector> serverConnectors = new ArrayList<>();
 
         // Calculate Idle Timeout as twice the auto-refresh interval. This ensures that even with some variance in timing,
         // we are able to avoid closing connections from users' browsers most of the time. This can make a significant difference
@@ -845,7 +843,7 @@ public class JettyServer implements NiFiServer, ExtensionUiLoader {
         final long idleTimeout = autoRefreshMillis * 2;
 
         // If the interfaces collection is empty or each element is empty
-        if (networkInterfaces.isEmpty() || networkInterfaces.values().stream().filter(value -> !Strings.isNullOrEmpty(value)).collect(Collectors.toList()).isEmpty()) {
+        if (networkInterfaces.isEmpty() || networkInterfaces.values().stream().filter(value -> StringUtils.isNotBlank(value)).collect(Collectors.toList()).isEmpty()) {
             final ServerConnector serverConnector = serverConnectorCreator.create(server, configuration);
 
             // Set host and port
@@ -857,7 +855,7 @@ public class JettyServer implements NiFiServer, ExtensionUiLoader {
             serverConnectors.add(serverConnector);
         } else {
             // Add connectors for all IPs from network interfaces
-            serverConnectors.addAll(Lists.newArrayList(networkInterfaces.values().stream().map(ifaceName -> {
+            serverConnectors.addAll(new ArrayList<>(networkInterfaces.values().stream().map(ifaceName -> {
                 NetworkInterface iface = null;
                 try {
                     iface = NetworkInterface.getByName(ifaceName);

@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.web;
 
-import com.google.common.collect.Sets;
 import io.prometheus.client.CollectorRegistry;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.nifi.action.Action;
@@ -2850,7 +2849,7 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
                     final ControllerServiceDTO dto = dtoFactory.createControllerServiceDto(cs);
                     final ControllerServiceReference ref = controllerService.getReferences();
                     final ControllerServiceReferencingComponentsEntity referencingComponentsEntity =
-                            createControllerServiceReferencingComponentsEntity(ref, Sets.newHashSet(controllerService.getIdentifier()));
+                            createControllerServiceReferencingComponentsEntity(ref, Collections.singleton(controllerService.getIdentifier()));
                     dto.setReferencingComponents(referencingComponentsEntity.getControllerServiceReferencingComponents());
                     return dto;
                 });
@@ -4504,7 +4503,7 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
     @Override
     public ControllerServiceEntity getControllerService(final String controllerServiceId) {
         final ControllerServiceNode controllerService = controllerServiceDAO.getControllerService(controllerServiceId);
-        return createControllerServiceEntity(controllerService, Sets.newHashSet(controllerServiceId));
+        return createControllerServiceEntity(controllerService, Collections.singleton(controllerServiceId));
     }
 
     @Override
@@ -4525,7 +4524,7 @@ public class StandardNiFiServiceFacade implements NiFiServiceFacade {
     public ControllerServiceReferencingComponentsEntity getControllerServiceReferencingComponents(final String controllerServiceId) {
         final ControllerServiceNode service = controllerServiceDAO.getControllerService(controllerServiceId);
         final ControllerServiceReference ref = service.getReferences();
-        return createControllerServiceReferencingComponentsEntity(ref, Sets.newHashSet(controllerServiceId));
+        return createControllerServiceReferencingComponentsEntity(ref, Collections.singleton(controllerServiceId));
     }
 
     private ReportingTaskEntity createReportingTaskEntity(final ReportingTaskNode reportingTask) {

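Collections.singleton is a sound replacement for Sets.newHashSet(oneElement) with one caveat: the singleton set is immutable, so the substitution is only safe when the callee never mutates the set, which presumably holds for these call sites. A sketch of the behavioral difference:

    import java.util.Collections;
    import java.util.Set;

    class SingletonSketch {
        public static void main(String[] args) {
            Set<String> ids = Collections.singleton("service-id");
            System.out.println(ids.contains("service-id")); // true
            // Unlike new HashSet<>(...), mutation throws:
            // ids.add("other") -> UnsupportedOperationException
        }
    }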
@@ -16,8 +16,8 @@
  */
 package org.apache.nifi.web.api;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.authorization.AuthorizableLookup;
 import org.apache.nifi.authorization.AuthorizeAccess;
@@ -134,7 +134,7 @@ public abstract class ApplicationResource {
     private FlowController flowController;
 
     private static final int MAX_CACHE_SOFT_LIMIT = 500;
-    private final Cache<CacheKey, Request<? extends Entity>> twoPhaseCommitCache = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
+    private final Cache<CacheKey, Request<? extends Entity>> twoPhaseCommitCache = Caffeine.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
 
     protected void forwardToLoginMessagePage(final HttpServletRequest httpServletRequest, final HttpServletResponse httpServletResponse, final String message) throws Exception {
         forwardToMessagePage(httpServletRequest, httpServletResponse, LOGIN_ERROR_TITLE, message);
@@ -718,7 +718,7 @@ public abstract class ApplicationResource {
     }
 
     private <T extends Entity> void phaseOneStoreTransaction(final T requestEntity, final Revision revision, final Set<Revision> revisions) {
-        if (twoPhaseCommitCache.size() > MAX_CACHE_SOFT_LIMIT) {
+        if (twoPhaseCommitCache.estimatedSize() > MAX_CACHE_SOFT_LIMIT) {
             throw new IllegalStateException("The maximum number of requests are in progress.");
         }
 

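The rename from size() to estimatedSize() is deliberate on Caffeine's part: the count can include entries that have expired but not yet been swept by maintenance. For a soft limit like the one above the estimate is fine; when a tighter number matters, cleanUp() can be invoked first. A hedged sketch:

    import com.github.benmanes.caffeine.cache.Cache;
    import com.github.benmanes.caffeine.cache.Caffeine;
    import java.util.concurrent.TimeUnit;

    class EstimatedSizeSketch {
        public static void main(String[] args) {
            Cache<String, String> cache = Caffeine.newBuilder()
                    .expireAfterWrite(1, TimeUnit.MINUTES)
                    .build();
            cache.put("request-1", "pending");
            cache.cleanUp(); // flush pending maintenance for a tighter estimate
            System.out.println(cache.estimatedSize() > 500); // soft-limit check as in the diff
        }
    }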
@@ -16,8 +16,6 @@
  */
 package org.apache.nifi.web;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.nifi.action.Component;
 import org.apache.nifi.action.FlowChangeAction;
 import org.apache.nifi.action.Operation;
@@ -71,7 +69,10 @@ import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Date;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
@@ -359,12 +360,12 @@ public class StandardNiFiServiceFacadeTest {
         final String parameterName = "foo";
         final VersionedParameterContext versionedParameterContext = mock(VersionedParameterContext.class);
         when(versionedParameterContext.getName()).thenReturn(parameterName);
-        final Map<String, VersionedParameterContext> parameterContexts = Maps.newHashMap();
+        final Map<String, VersionedParameterContext> parameterContexts = new LinkedHashMap<>();
         parameterContexts.put(parameterName, versionedParameterContext);
         when(flowMapper.mapParameterContexts(processGroup, true)).thenReturn(parameterContexts);
 
         final ExternalControllerServiceReference externalControllerServiceReference = mock(ExternalControllerServiceReference.class);
-        final Map<String, ExternalControllerServiceReference> externalControllerServiceReferences = Maps.newHashMap();
+        final Map<String, ExternalControllerServiceReference> externalControllerServiceReferences = new LinkedHashMap<>();
         externalControllerServiceReferences.put("test", externalControllerServiceReference);
         when(nonVersionedProcessGroup.getExternalControllerServiceReferences()).thenReturn(externalControllerServiceReferences);
 
@@ -389,7 +390,7 @@ public class StandardNiFiServiceFacadeTest {
         when(processGroupDAO.getProcessGroup(groupId)).thenReturn(processGroup);
 
         when(processGroup.getVersionControlInformation()).thenReturn(null);
-        when(processGroup.getProcessGroups()).thenReturn(Sets.newHashSet(childProcessGroup));
+        when(processGroup.getProcessGroups()).thenReturn(Collections.singleton(childProcessGroup));
         when(childProcessGroup.getVersionControlInformation()).thenReturn(null);
 
         assertFalse(serviceFacade.isAnyProcessGroupUnderVersionControl(groupId));
@@ -404,7 +405,7 @@ public class StandardNiFiServiceFacadeTest {
 
         final VersionControlInformation vci = mock(VersionControlInformation.class);
         when(processGroup.getVersionControlInformation()).thenReturn(vci);
-        when(processGroup.getProcessGroups()).thenReturn(Sets.newHashSet());
+        when(processGroup.getProcessGroups()).thenReturn(new HashSet<>());
 
         assertTrue(serviceFacade.isAnyProcessGroupUnderVersionControl(groupId));
     }
@@ -419,7 +420,7 @@ public class StandardNiFiServiceFacadeTest {
 
         final VersionControlInformation vci = mock(VersionControlInformation.class);
         when(processGroup.getVersionControlInformation()).thenReturn(null);
-        when(processGroup.getProcessGroups()).thenReturn(Sets.newHashSet(childProcessGroup));
+        when(processGroup.getProcessGroups()).thenReturn(Collections.singleton(childProcessGroup));
         when(childProcessGroup.getVersionControlInformation()).thenReturn(vci);
 
         assertTrue(serviceFacade.isAnyProcessGroupUnderVersionControl(groupId));

@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.web.api;
 
-import com.google.common.collect.Sets;
 import org.apache.nifi.registry.flow.VersionedFlowSnapshot;
 import org.apache.nifi.registry.flow.VersionedFlowSnapshotMetadata;
 import org.apache.nifi.flow.VersionedProcessGroup;
@@ -28,6 +27,7 @@ import org.mockito.Mock;
 import org.mockito.junit.MockitoJUnitRunner;
 
 import javax.ws.rs.core.Response;
+import java.util.Collections;
 import java.util.UUID;
 
 import static org.junit.Assert.assertEquals;
@@ -62,8 +62,8 @@ public class TestVersionsResource {
 
         final VersionedProcessGroup innerVersionedProcessGroup = mock(VersionedProcessGroup.class);
         final VersionedProcessGroup innerInnerVersionedProcessGroup = mock(VersionedProcessGroup.class);
-        when(versionedProcessGroup.getProcessGroups()).thenReturn(Sets.newHashSet(innerVersionedProcessGroup));
-        when(innerVersionedProcessGroup.getProcessGroups()).thenReturn(Sets.newHashSet(innerInnerVersionedProcessGroup));
+        when(versionedProcessGroup.getProcessGroups()).thenReturn(Collections.singleton(innerVersionedProcessGroup));
+        when(innerVersionedProcessGroup.getProcessGroups()).thenReturn(Collections.singleton(innerInnerVersionedProcessGroup));
 
         final Response response = versionsResource.exportFlowVersion(groupId);
 

@@ -169,10 +169,6 @@
             <groupId>org.bouncycastle</groupId>
             <artifactId>bcpkix-jdk15on</artifactId>
         </dependency>
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-jdbc</artifactId>

@@ -16,8 +16,8 @@
  */
 package org.apache.nifi.web.security.logout;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import org.apache.nifi.util.StringUtils;
 import org.apache.nifi.web.security.util.CacheKey;
 
@@ -33,7 +33,7 @@ public class LogoutRequestManager {
     }
 
     public LogoutRequestManager(final int cacheExpiration, final TimeUnit units) {
-        this.requestLookup = CacheBuilder.newBuilder().expireAfterWrite(cacheExpiration, units).build();
+        this.requestLookup = Caffeine.newBuilder().expireAfterWrite(cacheExpiration, units).build();
     }
 
     public void start(final LogoutRequest logoutRequest) {

@@ -16,8 +16,8 @@
  */
 package org.apache.nifi.web.security.oidc;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import com.nimbusds.oauth2.sdk.AuthorizationGrant;
 import com.nimbusds.oauth2.sdk.Scope;
 import com.nimbusds.oauth2.sdk.id.State;
@@ -27,7 +27,6 @@ import org.apache.nifi.web.security.util.IdentityProviderUtils;
 
 import java.io.IOException;
 import java.net.URI;
-import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
 import static org.apache.nifi.web.security.oidc.StandardOidcIdentityProvider.OPEN_ID_CONNECT_SUPPORT_IS_NOT_CONFIGURED;
@@ -37,9 +36,9 @@ import static org.apache.nifi.web.security.oidc.StandardOidcIdentityProvider.OPE
  */
 public class OidcService {
 
-    private OidcIdentityProvider identityProvider;
-    private Cache<CacheKey, State> stateLookupForPendingRequests; // identifier from cookie -> state value
-    private Cache<CacheKey, String> jwtLookupForCompletedRequests; // identifier from cookie -> jwt or identity (and generate jwt on retrieval)
+    private final OidcIdentityProvider identityProvider;
+    private final Cache<CacheKey, State> stateLookupForPendingRequests; // identifier from cookie -> state value
+    private final Cache<CacheKey, String> jwtLookupForCompletedRequests; // identifier from cookie -> jwt or identity (and generate jwt on retrieval)
 
     /**
     * Creates a new OIDC with an expiration of 1 minute.
@@ -66,8 +65,8 @@ public class OidcService {
 
         identityProvider.initializeProvider();
         this.identityProvider = identityProvider;
-        this.stateLookupForPendingRequests = CacheBuilder.newBuilder().expireAfterWrite(duration, units).build();
-        this.jwtLookupForCompletedRequests = CacheBuilder.newBuilder().expireAfterWrite(duration, units).build();
+        this.stateLookupForPendingRequests = Caffeine.newBuilder().expireAfterWrite(duration, units).build();
+        this.jwtLookupForCompletedRequests = Caffeine.newBuilder().expireAfterWrite(duration, units).build();
     }
 
     /**
@@ -138,15 +137,11 @@ public class OidcService {
         final CacheKey oidcRequestIdentifierKey = new CacheKey(oidcRequestIdentifier);
         final State state = new State(IdentityProviderUtils.generateStateValue());
 
-        try {
-            synchronized (stateLookupForPendingRequests) {
-                final State cachedState = stateLookupForPendingRequests.get(oidcRequestIdentifierKey, () -> state);
-                if (!IdentityProviderUtils.timeConstantEqualityCheck(state.getValue(), cachedState.getValue())) {
-                    throw new IllegalStateException("An existing login request is already in progress.");
-                }
+        synchronized (stateLookupForPendingRequests) {
+            final State cachedState = stateLookupForPendingRequests.get(oidcRequestIdentifierKey, key -> state);
+            if (!IdentityProviderUtils.timeConstantEqualityCheck(state.getValue(), cachedState.getValue())) {
+                throw new IllegalStateException("An existing login request is already in progress.");
             }
-        } catch (ExecutionException e) {
-            throw new IllegalStateException("Unable to store the login request state.");
         }
 
         return state;
@@ -237,16 +232,13 @@ public class OidcService {
     */
    public void storeJwt(final String oidcRequestIdentifier, final String jwt) {
        final CacheKey oidcRequestIdentifierKey = new CacheKey(oidcRequestIdentifier);
-        try {
-            // Cache the jwt for later retrieval
-            synchronized (jwtLookupForCompletedRequests) {
-                final String cachedJwt = jwtLookupForCompletedRequests.get(oidcRequestIdentifierKey, () -> jwt);
-                if (!IdentityProviderUtils.timeConstantEqualityCheck(jwt, cachedJwt)) {
-                    throw new IllegalStateException("An existing login request is already in progress.");
-                }
+
+        // Cache the jwt for later retrieval
+        synchronized (jwtLookupForCompletedRequests) {
+            final String cachedJwt = jwtLookupForCompletedRequests.get(oidcRequestIdentifierKey, key -> jwt);
+            if (!IdentityProviderUtils.timeConstantEqualityCheck(jwt, cachedJwt)) {
+                throw new IllegalStateException("An existing login request is already in progress.");
             }
-        } catch (final ExecutionException e) {
-            throw new IllegalStateException("Unable to store the login authentication token.");
        }
    }
 

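The try/catch removal in this file falls out of the API change: Guava's Cache.get(key, Callable) declares a checked ExecutionException, while Caffeine's Cache.get(key, Function) computes the value with a mapping function that receives the key and throws nothing checked. A sketch of the two lookup styles (cache contents are illustrative):

    import com.github.benmanes.caffeine.cache.Cache;
    import com.github.benmanes.caffeine.cache.Caffeine;
    import java.util.concurrent.TimeUnit;

    class ComputeIfAbsentSketch {
        public static void main(String[] args) {
            Cache<String, String> cache = Caffeine.newBuilder()
                    .expireAfterWrite(1, TimeUnit.MINUTES)
                    .build();

            // Guava required: cache.get("k", () -> compute()) inside try/catch.
            // Caffeine: no checked exception, and the key is passed to the function.
            String value = cache.get("k", key -> "computed-for-" + key);
            System.out.println(value); // computed-for-k
        }
    }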
@@ -16,8 +16,8 @@
  */
 package org.apache.nifi.web.security.saml.impl;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import org.apache.nifi.util.StringUtils;
 import org.apache.nifi.web.security.jwt.provider.BearerTokenProvider;
 import org.apache.nifi.web.security.saml.SAMLStateManager;
@@ -27,7 +27,6 @@ import org.apache.nifi.web.security.util.IdentityProviderUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
 public class StandardSAMLStateManager implements SAMLStateManager {
@@ -48,8 +47,8 @@ public class StandardSAMLStateManager implements SAMLStateManager {
 
     public StandardSAMLStateManager(final BearerTokenProvider bearerTokenProvider, final int cacheExpiration, final TimeUnit units) {
         this.bearerTokenProvider = bearerTokenProvider;
-        this.stateLookupForPendingRequests = CacheBuilder.newBuilder().expireAfterWrite(cacheExpiration, units).build();
-        this.jwtLookupForCompletedRequests = CacheBuilder.newBuilder().expireAfterWrite(cacheExpiration, units).build();
+        this.stateLookupForPendingRequests = Caffeine.newBuilder().expireAfterWrite(cacheExpiration, units).build();
+        this.jwtLookupForCompletedRequests = Caffeine.newBuilder().expireAfterWrite(cacheExpiration, units).build();
     }
 
     @Override
@@ -61,15 +60,11 @@ public class StandardSAMLStateManager implements SAMLStateManager {
         final CacheKey requestIdentifierKey = new CacheKey(requestIdentifier);
         final String state = IdentityProviderUtils.generateStateValue();
 
-        try {
-            synchronized (stateLookupForPendingRequests) {
-                final String cachedState = stateLookupForPendingRequests.get(requestIdentifierKey, () -> state);
-                if (!IdentityProviderUtils.timeConstantEqualityCheck(state, cachedState)) {
-                    throw new IllegalStateException("An existing login request is already in progress.");
-                }
+        synchronized (stateLookupForPendingRequests) {
+            final String cachedState = stateLookupForPendingRequests.get(requestIdentifierKey, key -> state);
+            if (!IdentityProviderUtils.timeConstantEqualityCheck(state, cachedState)) {
+                throw new IllegalStateException("An existing login request is already in progress.");
             }
-        } catch (ExecutionException e) {
-            throw new IllegalStateException("Unable to store the login request state.");
         }
 
         return state;
@@ -109,16 +104,12 @@ public class StandardSAMLStateManager implements SAMLStateManager {
 
         final CacheKey requestIdentifierKey = new CacheKey(requestIdentifier);
         final String bearerToken = bearerTokenProvider.getBearerToken(token);
-        try {
-            // cache the jwt for later retrieval
-            synchronized (jwtLookupForCompletedRequests) {
-                final String cachedJwt = jwtLookupForCompletedRequests.get(requestIdentifierKey, () -> bearerToken);
-                if (!IdentityProviderUtils.timeConstantEqualityCheck(bearerToken, cachedJwt)) {
-                    throw new IllegalStateException("An existing login request is already in progress.");
-                }
+        // cache the jwt for later retrieval
+        synchronized (jwtLookupForCompletedRequests) {
+            final String cachedJwt = jwtLookupForCompletedRequests.get(requestIdentifierKey, key -> bearerToken);
+            if (!IdentityProviderUtils.timeConstantEqualityCheck(bearerToken, cachedJwt)) {
+                throw new IllegalStateException("An existing login request is already in progress.");
            }
-        } catch (final ExecutionException e) {
-            throw new IllegalStateException("Unable to store the login authentication token.");
        }
    }
 

@@ -16,10 +16,6 @@
  */
 package org.apache.nifi.web.security.x509.ocsp;
 
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import com.google.common.util.concurrent.UncheckedExecutionException;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -41,6 +37,9 @@ import javax.ws.rs.client.Client;
 import javax.ws.rs.client.Entity;
 import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.Response;
+
+import com.github.benmanes.caffeine.cache.Caffeine;
+import com.github.benmanes.caffeine.cache.LoadingCache;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.security.util.KeyStoreUtils;
 import org.apache.nifi.security.util.SslContextFactory;
@@ -127,17 +126,14 @@ public class OcspCertificateValidator {
             final long cacheDurationMillis = FormatUtils.getTimeDuration("12 hours", TimeUnit.MILLISECONDS);
 
             // build the ocsp cache
-            ocspCache = CacheBuilder.newBuilder().expireAfterWrite(cacheDurationMillis, TimeUnit.MILLISECONDS).build(new CacheLoader<OcspRequest, OcspStatus>() {
-                @Override
-                public OcspStatus load(OcspRequest ocspRequest) throws Exception {
-                    final String subjectDn = ocspRequest.getSubjectCertificate().getSubjectX500Principal().getName();
+            ocspCache = Caffeine.newBuilder().expireAfterWrite(cacheDurationMillis, TimeUnit.MILLISECONDS).build(ocspRequest -> {
+                final String subjectDn = ocspRequest.getSubjectCertificate().getSubjectX500Principal().getName();
 
-                    logger.info(String.format("Validating client certificate via OCSP: <%s>", subjectDn));
-                    final OcspStatus ocspStatus = getOcspStatus(ocspRequest);
-                    logger.info(String.format("Client certificate status for <%s>: %s", subjectDn, ocspStatus.toString()));
+                logger.info(String.format("Validating client certificate via OCSP: <%s>", subjectDn));
+                final OcspStatus ocspStatus = getOcspStatus(ocspRequest);
+                logger.info(String.format("Client certificate status for <%s>: %s", subjectDn, ocspStatus.toString()));
 
-                    return ocspStatus;
-                }
+                return ocspStatus;
             });
         } catch (final Exception e) {
             logger.error("Disabling OCSP certificate validation. Unable to load OCSP configuration: " + e, e);
@@ -234,17 +230,13 @@ public class OcspCertificateValidator {
         // create the ocsp status key
         final OcspRequest ocspRequest = new OcspRequest(subjectCertificate, issuerCertificate);
 
-        try {
-            // determine the status and ensure it isn't verified as revoked
-            final OcspStatus ocspStatus = ocspCache.getUnchecked(ocspRequest);
+        // determine the status and ensure it isn't verified as revoked
+        final OcspStatus ocspStatus = ocspCache.get(ocspRequest);
 
-            // we only disallow when we have a verified response that states the certificate is revoked
-            if (VerificationStatus.Verified.equals(ocspStatus.getVerificationStatus()) && ValidationStatus.Revoked.equals(ocspStatus.getValidationStatus())) {
-                throw new CertificateStatusException(String.format("Client certificate for <%s> is revoked according to the certificate authority.",
-                        subjectCertificate.getSubjectX500Principal().getName()));
-            }
-        } catch (final UncheckedExecutionException uee) {
-            logger.warn(String.format("Unable to validate client certificate via OCSP: <%s>", subjectCertificate.getSubjectX500Principal().getName()), uee.getCause());
+        // we only disallow when we have a verified response that states the certificate is revoked
+        if (VerificationStatus.Verified.equals(ocspStatus.getVerificationStatus()) && ValidationStatus.Revoked.equals(ocspStatus.getValidationStatus())) {
+            throw new CertificateStatusException(String.format("Client certificate for <%s> is revoked according to the certificate authority.",
+                    subjectCertificate.getSubjectX500Principal().getName()));
         }
     }
 }
@@ -356,7 +348,7 @@ public class OcspCertificateValidator {
 
         // ensure the appropriate response object
         final Object ocspResponseObject = ocspResponse.getResponseObject();
-        if (ocspResponseObject == null || !(ocspResponseObject instanceof BasicOCSPResp)) {
+        if (!(ocspResponseObject instanceof BasicOCSPResp)) {
             logger.warn(String.format("Unexpected OCSP response object: %s", ocspResponseObject));
             return ocspStatus;
         }

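With Caffeine, the anonymous CacheLoader subclass collapses to a lambda passed to build(...), producing a LoadingCache whose get(key) replaces Guava's getUnchecked(key). A hedged sketch of the pattern (the loader body is illustrative):

    import com.github.benmanes.caffeine.cache.Caffeine;
    import com.github.benmanes.caffeine.cache.LoadingCache;
    import java.util.concurrent.TimeUnit;

    class LoadingCacheSketch {
        public static void main(String[] args) {
            LoadingCache<String, Integer> lengths = Caffeine.newBuilder()
                    .expireAfterWrite(12, TimeUnit.HOURS)
                    .build(key -> key.length()); // loader runs on first access

            System.out.println(lengths.get("ocsp")); // 4, loaded then cached
        }
    }

If the loader throws, Caffeine surfaces the failure as an unchecked exception rather than Guava's UncheckedExecutionException, which is why the catch block above disappears rather than being retargeted.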
@ -1,391 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.nifi.web.security.x509.ocsp
|
||||
|
||||
import com.google.common.cache.CacheBuilder
|
||||
import com.google.common.cache.CacheLoader
|
||||
import com.google.common.cache.LoadingCache
|
||||
import org.apache.nifi.util.NiFiProperties
|
||||
import org.bouncycastle.asn1.x500.X500Name
|
||||
import org.bouncycastle.asn1.x509.ExtendedKeyUsage
|
||||
import org.bouncycastle.asn1.x509.Extension
|
||||
import org.bouncycastle.asn1.x509.KeyPurposeId
|
||||
import org.bouncycastle.asn1.x509.KeyUsage
|
||||
import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo
|
||||
import org.bouncycastle.cert.X509CertificateHolder
|
||||
import org.bouncycastle.cert.X509v3CertificateBuilder
|
||||
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter
|
||||
import org.bouncycastle.jce.provider.BouncyCastleProvider
|
||||
import org.bouncycastle.operator.ContentSigner
|
||||
import org.bouncycastle.operator.OperatorCreationException
|
||||
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder
|
||||
import org.junit.After
|
||||
import org.junit.Before
|
||||
import org.junit.BeforeClass
|
||||
import org.junit.Ignore
|
||||
import org.junit.Test
|
||||
import org.slf4j.Logger
|
||||
import org.slf4j.LoggerFactory
|
||||
|
||||
import javax.ws.rs.client.ClientBuilder
|
||||
import java.security.InvalidKeyException
|
||||
import java.security.KeyPair
|
||||
import java.security.KeyPairGenerator
|
||||
import java.security.NoSuchAlgorithmException
|
||||
import java.security.NoSuchProviderException
|
||||
import java.security.PrivateKey
|
||||
import java.security.PublicKey
|
||||
import java.security.Security
|
||||
import java.security.SignatureException
|
||||
import java.security.cert.CertificateException
|
||||
import java.security.cert.X509Certificate
|
||||
|
||||
import static groovy.test.GroovyAssert.shouldFail
|
||||
import static org.junit.Assert.fail
|
||||
|
||||
class OcspCertificateValidatorGroovyTest {
|
||||
private static final Logger logger = LoggerFactory.getLogger(OcspCertificateValidatorGroovyTest.class)
|
||||
|
||||
private static final int KEY_SIZE = 2048
|
||||
|
||||
private static final long YESTERDAY = System.currentTimeMillis() - 24 * 60 * 60 * 1000
|
||||
private static final long ONE_YEAR_FROM_NOW = System.currentTimeMillis() + 365 * 24 * 60 * 60 * 1000
|
||||
private static final String SIGNATURE_ALGORITHM = "SHA256withRSA"
|
||||
private static final String PROVIDER = "BC"
|
||||
|
||||
private static final String SUBJECT_DN = "CN=NiFi Test Server,OU=Security,O=Apache,ST=CA,C=US"
|
||||
private static final String ISSUER_DN = "CN=NiFi Test CA,OU=Security,O=Apache,ST=CA,C=US"
|
||||
|
||||
private NiFiProperties mockProperties
|
||||
|
||||
// System under test
|
||||
OcspCertificateValidator certificateValidator
|
||||
|
||||
@BeforeClass
|
||||
static void setUpOnce() throws Exception {
|
||||
Security.addProvider(new BouncyCastleProvider())
|
||||
}
|
||||
|
||||
@Before
|
||||
void setUp() throws Exception {
|
||||
mockProperties = new NiFiProperties() {
|
||||
@Override
|
||||
String getProperty(String key) {
|
||||
return 'value_for_' + key
|
||||
}
|
||||
|
||||
@Override
|
||||
Set<String> getPropertyKeys() {
|
||||
return ["A", "set"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
void tearDown() throws Exception {
|
||||
certificateValidator?.metaClass = null
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a public/private RSA keypair using the default key size.
|
||||
*
|
||||
* @return the keypair
|
||||
* @throws NoSuchAlgorithmException if the RSA algorithm is not available
|
||||
*/
|
||||
private static KeyPair generateKeyPair() throws NoSuchAlgorithmException {
|
||||
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA")
|
||||
keyPairGenerator.initialize(KEY_SIZE)
|
||||
return keyPairGenerator.generateKeyPair()
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a signed certificate using an on-demand keypair.
|
||||
*
|
||||
* @param dn the DN
|
||||
* @return the certificate
|
||||
* @throws IOException
|
||||
* @throws NoSuchAlgorithmException
|
||||
* @throws CertificateException
|
||||
* @throws NoSuchProviderException
|
||||
* @throws SignatureException
|
||||
* @throws InvalidKeyException
|
||||
* @throws OperatorCreationException
|
||||
*/
|
||||
private
|
||||
static X509Certificate generateCertificate(String dn) throws IOException, NoSuchAlgorithmException, CertificateException, NoSuchProviderException, SignatureException, InvalidKeyException, OperatorCreationException {
|
||||
KeyPair keyPair = generateKeyPair()
|
||||
return generateCertificate(dn, keyPair)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a signed certificate with a specific keypair.
|
||||
*
|
||||
* @param dn the DN
|
||||
* @param keyPair the public key will be included in the certificate and the the private key is used to sign the certificate
|
||||
* @return the certificate
|
||||
* @throws IOException
|
||||
* @throws NoSuchAlgorithmException
|
||||
* @throws CertificateException
|
||||
* @throws NoSuchProviderException
|
||||
* @throws SignatureException
|
||||
* @throws InvalidKeyException
|
||||
* @throws OperatorCreationException
|
||||
*/
|
||||
private
|
||||
static X509Certificate generateCertificate(String dn, KeyPair keyPair) throws IOException, NoSuchAlgorithmException, CertificateException, NoSuchProviderException, SignatureException, InvalidKeyException, OperatorCreationException {
|
||||
PrivateKey privateKey = keyPair.getPrivate()
|
||||
ContentSigner sigGen = new JcaContentSignerBuilder(SIGNATURE_ALGORITHM).setProvider(PROVIDER).build(privateKey)
|
||||
SubjectPublicKeyInfo subPubKeyInfo = SubjectPublicKeyInfo.getInstance(keyPair.getPublic().getEncoded())
|
||||
Date startDate = new Date(YESTERDAY)
|
||||
Date endDate = new Date(ONE_YEAR_FROM_NOW)
|
||||
|
||||
X509v3CertificateBuilder certBuilder = new X509v3CertificateBuilder(
|
||||
new X500Name(dn),
|
||||
BigInteger.valueOf(System.currentTimeMillis()),
|
||||
startDate, endDate,
|
||||
new X500Name(dn),
|
||||
subPubKeyInfo)
|
||||
|
||||
// Set certificate extensions
|
||||
// (1) digitalSignature extension
|
||||
certBuilder.addExtension(Extension.keyUsage, true,
|
||||
new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyEncipherment | KeyUsage.dataEncipherment | KeyUsage.keyAgreement))
|
||||
|
||||
// (2) extendedKeyUsage extension
|
||||
Vector<KeyPurposeId> ekUsages = new Vector<>()
|
||||
ekUsages.add(KeyPurposeId.id_kp_clientAuth)
|
||||
ekUsages.add(KeyPurposeId.id_kp_serverAuth)
|
||||
certBuilder.addExtension(Extension.extendedKeyUsage, false, new ExtendedKeyUsage(ekUsages))
|
||||
|
||||
// Sign the certificate
|
||||
X509CertificateHolder certificateHolder = certBuilder.build(sigGen)
|
||||
return new JcaX509CertificateConverter().setProvider(PROVIDER)
|
||||
.getCertificate(certificateHolder)
|
||||
}

    /**
     * Generates a certificate signed by the issuer key.
     *
     * @param dn the subject DN
     * @param issuerDn the issuer DN
     * @param issuerKey the issuer private key
     * @return the certificate
     * @throws IOException
     * @throws NoSuchAlgorithmException
     * @throws CertificateException
     * @throws NoSuchProviderException
     * @throws SignatureException
     * @throws InvalidKeyException
     * @throws OperatorCreationException
     */
    private static X509Certificate generateIssuedCertificate(String dn, String issuerDn, PrivateKey issuerKey) throws IOException, NoSuchAlgorithmException, CertificateException, NoSuchProviderException, SignatureException, InvalidKeyException, OperatorCreationException {
        KeyPair keyPair = generateKeyPair()
        return generateIssuedCertificate(dn, keyPair.getPublic(), issuerDn, issuerKey)
    }

    /**
     * Generates a certificate with a specific public key signed by the issuer key.
     *
     * @param dn the subject DN
     * @param publicKey the subject public key
     * @param issuerDn the issuer DN
     * @param issuerKey the issuer private key
     * @return the certificate
     * @throws IOException
     * @throws NoSuchAlgorithmException
     * @throws CertificateException
     * @throws NoSuchProviderException
     * @throws SignatureException
     * @throws InvalidKeyException
     * @throws OperatorCreationException
     */
    private static X509Certificate generateIssuedCertificate(String dn, PublicKey publicKey, String issuerDn, PrivateKey issuerKey) throws IOException, NoSuchAlgorithmException, CertificateException, NoSuchProviderException, SignatureException, InvalidKeyException, OperatorCreationException {
        ContentSigner sigGen = new JcaContentSignerBuilder(SIGNATURE_ALGORITHM).setProvider(PROVIDER).build(issuerKey)
        SubjectPublicKeyInfo subPubKeyInfo = SubjectPublicKeyInfo.getInstance(publicKey.getEncoded())
        Date startDate = new Date(YESTERDAY)
        Date endDate = new Date(ONE_YEAR_FROM_NOW)

        X509v3CertificateBuilder v3CertGen = new X509v3CertificateBuilder(
                new X500Name(issuerDn),
                BigInteger.valueOf(System.currentTimeMillis()),
                startDate, endDate,
                new X500Name(dn),
                subPubKeyInfo)

        X509CertificateHolder certificateHolder = v3CertGen.build(sigGen)
        return new JcaX509CertificateConverter().setProvider(PROVIDER)
                .getCertificate(certificateHolder)
    }

    private static X509Certificate[] generateCertificateChain(String dn = SUBJECT_DN, String issuerDn = ISSUER_DN) {
        final KeyPair issuerKeyPair = generateKeyPair()
        final PrivateKey issuerPrivateKey = issuerKeyPair.getPrivate()

        final X509Certificate issuerCertificate = generateCertificate(issuerDn, issuerKeyPair)
        final X509Certificate certificate = generateIssuedCertificate(dn, issuerDn, issuerPrivateKey)
        [certificate, issuerCertificate] as X509Certificate[]
    }

    @Test
    void testShouldGenerateCertificate() throws Exception {
        // Arrange
        final String testDn = "CN=This is a test"

        // Act
        X509Certificate certificate = generateCertificate(testDn)
        logger.info("Generated certificate: \n{}", certificate)

        // Assert
        assert certificate.getSubjectDN().getName() == testDn
        assert certificate.getIssuerDN().getName() == testDn
        certificate.verify(certificate.getPublicKey())
    }

    @Test
    void testShouldGenerateCertificateFromKeyPair() throws Exception {
        // Arrange
        final String testDn = "CN=This is a test"
        final KeyPair keyPair = generateKeyPair()

        // Act
        X509Certificate certificate = generateCertificate(testDn, keyPair)
        logger.info("Generated certificate: \n{}", certificate)

        // Assert
        assert certificate.getPublicKey() == keyPair.getPublic()
        assert certificate.getSubjectDN().getName() == testDn
        assert certificate.getIssuerDN().getName() == testDn
        certificate.verify(certificate.getPublicKey())
    }

    @Test
    void testShouldGenerateIssuedCertificate() throws Exception {
        // Arrange
        final String testDn = "CN=This is a signed test"
        final String issuerDn = "CN=Issuer CA"
        final KeyPair issuerKeyPair = generateKeyPair()
        final PrivateKey issuerPrivateKey = issuerKeyPair.getPrivate()

        final X509Certificate issuerCertificate = generateCertificate(issuerDn, issuerKeyPair)
        logger.info("Generated issuer certificate: \n{}", issuerCertificate)

        // Act
        X509Certificate certificate = generateIssuedCertificate(testDn, issuerDn, issuerPrivateKey)
        logger.info("Generated signed certificate: \n{}", certificate)

        // Assert
        assert issuerCertificate.getPublicKey() == issuerKeyPair.getPublic()
        assert certificate.getSubjectX500Principal().getName() == testDn
        assert certificate.getIssuerX500Principal().getName() == issuerDn
        certificate.verify(issuerCertificate.getPublicKey())

        try {
            certificate.verify(certificate.getPublicKey())
            fail("Should have thrown exception")
        } catch (Exception e) {
            assert e instanceof SignatureException
            assert e.getMessage().contains("certificate does not verify with supplied key")
        }
    }

    @Test
    void testShouldValidateCertificate() throws Exception {
        // Arrange
        X509Certificate[] certificateChain = generateCertificateChain()

        certificateValidator = new OcspCertificateValidator(mockProperties)

        // Must populate the client even though it is not used in this check
        certificateValidator.client = ClientBuilder.newBuilder().build()

        // Form a map of the request to a good status and load it into the cache
        OcspRequest goodRequest = new OcspRequest(certificateChain.first(), certificateChain.last())
        OcspStatus goodStatus = new OcspStatus()
        goodStatus.responseStatus = OcspStatus.ResponseStatus.Successful
        goodStatus.validationStatus = OcspStatus.ValidationStatus.Good
        goodStatus.verificationStatus = OcspStatus.VerificationStatus.Verified
        LoadingCache<OcspRequest, OcspStatus> cacheWithGoodCertificate = buildCacheWithContents([(goodRequest): goodStatus])
        certificateValidator.ocspCache = cacheWithGoodCertificate

        // Act
        certificateValidator.validate(certificateChain)

        // Assert
        assert true
    }

    // TODO - NIFI-1364
    @Ignore("To be implemented with Groovy test")
    @Test
    void testShouldNotValidateEmptyCertificate() throws Exception {

    }

    @Test
    void testShouldNotValidateRevokedCertificate() throws Exception {
        // Arrange
        X509Certificate[] certificateChain = generateCertificateChain()

        certificateValidator = new OcspCertificateValidator(mockProperties)

        // Must populate the client even though it is not used in this check
        certificateValidator.client = ClientBuilder.newBuilder().build()

        // Form a map of the request to a revoked status and load it into the cache
        OcspRequest revokedRequest = new OcspRequest(certificateChain.first(), certificateChain.last())
        OcspStatus revokedStatus = new OcspStatus()
        revokedStatus.responseStatus = OcspStatus.ResponseStatus.Successful
        revokedStatus.validationStatus = OcspStatus.ValidationStatus.Revoked
        revokedStatus.verificationStatus = OcspStatus.VerificationStatus.Verified
        LoadingCache<OcspRequest, OcspStatus> cacheWithRevokedCertificate = buildCacheWithContents([(revokedRequest): revokedStatus])
        certificateValidator.ocspCache = cacheWithRevokedCertificate

        // Act
        def msg = shouldFail(CertificateStatusException) {
            certificateValidator.validate(certificateChain)
        }

        // Assert
        assert msg =~ "is revoked according to the certificate authority"
    }

    LoadingCache<OcspRequest, OcspStatus> buildCacheWithContents(Map map) {
        CacheBuilder.newBuilder().build(new CacheLoader<OcspRequest, OcspStatus>() {
            @Override
            OcspStatus load(OcspRequest ocspRequest) throws Exception {
                logger.info("Mock cache implementation load(${ocspRequest}) returns ${map.get(ocspRequest)}")
                return map.get(ocspRequest) as OcspStatus
            }
        })
    }
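
For comparison, since this commit moves the other caches from Google Guava to Caffeine, an equivalent Caffeine-backed version of the helper above could look like the following Java sketch (the method body is an assumption, not part of this commit; OcspRequest and OcspStatus are the types used in this test):

    import com.github.benmanes.caffeine.cache.Caffeine;
    import com.github.benmanes.caffeine.cache.LoadingCache;
    import java.util.Map;

    // Caffeine's CacheLoader is a functional interface, so the anonymous
    // Guava CacheLoader subclass collapses into a method reference.
    private LoadingCache<OcspRequest, OcspStatus> buildCacheWithContents(Map<OcspRequest, OcspStatus> map) {
        return Caffeine.newBuilder().build(map::get);
    }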

    // TODO - NIFI-1364
    @Ignore("To be implemented with Groovy test")
    @Test
    void testValidateShouldHandleUnsignedResponse() throws Exception {

    }

    // TODO - NIFI-1364
    @Ignore("To be implemented with Groovy test")
    @Test
    void testValidateShouldHandleResponseWithIncorrectNonce() throws Exception {

    }
}

@@ -29,11 +29,8 @@ import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -64,24 +61,9 @@ public class OcspCertificateValidatorTest {
    private static final String SIGNATURE_ALGORITHM = "SHA256withRSA";
    private static final String PROVIDER = "BC";

    private static final String ISSUER_DN = "CN=NiFi Test CA,OU=Security,O=Apache,ST=CA,C=US";

    private static X509Certificate ISSUER_CERTIFICATE;

    @BeforeClass
    public static void setUpOnce() throws Exception {
        Security.addProvider(new BouncyCastleProvider());

        // ISSUER_CERTIFICATE = generateCertificate(ISSUER_DN);
    }

    @Before
    public void setUp() throws Exception {
    }

    @After
    public void tearDown() throws Exception {

    }

    /**

@@ -278,34 +260,4 @@ public class OcspCertificateValidatorTest {
            assert e.getMessage().contains("certificate does not verify with supplied key");
        }
    }

    @Ignore("To be implemented with Groovy test")
    @Test
    public void testShouldValidateCertificate() throws Exception {

    }

    @Ignore("To be implemented with Groovy test")
    @Test
    public void testShouldNotValidateEmptyCertificate() throws Exception {

    }

    @Ignore("To be implemented with Groovy test")
    @Test
    public void testShouldNotValidateInvalidCertificate() throws Exception {

    }

    @Ignore("To be implemented with Groovy test")
    @Test
    public void testValidateShouldHandleUnsignedResponse() throws Exception {

    }

    @Ignore("To be implemented with Groovy test")
    @Test
    public void testValidateShouldHandleResponseWithIncorrectNonce() throws Exception {

    }
}

@@ -314,11 +314,6 @@
            <artifactId>h2</artifactId>
            <version>1.4.199</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>28.0-jre</version>
        </dependency>

        <!-- open id connect - override transitive dependency version ranges -->
        <dependency>

@@ -264,10 +264,6 @@
            <artifactId>bcrypt</artifactId>
            <version>0.9.0</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>jcl-over-slf4j</artifactId>
@@ -284,12 +280,6 @@
            <version>1.16.0-SNAPSHOT</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>com.google.http-client</groupId>
            <artifactId>google-http-client</artifactId>
            <version>1.28.0</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.activemq</groupId>
            <artifactId>activemq-broker</artifactId>
@@ -17,7 +17,6 @@

package org.apache.nifi.processors.standard;

import com.google.common.collect.Lists;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
@@ -227,7 +226,7 @@ public class CalculateRecordStats extends AbstractProcessor {

        List<Map.Entry<String, Integer>> _flat = new ArrayList<>(toFilter.entrySet());
        _flat.sort(Map.Entry.comparingByValue());
        _flat = Lists.reverse(_flat);
        Collections.reverse(_flat);
        for (int index = 0; index < _flat.size() && index < limit; index++) {
            retVal.put(_flat.get(index).getKey(), _flat.get(index).getValue());
        }
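
One semantic note on this change: Guava's Lists.reverse returns a reversed view of the list without copying, while Collections.reverse reverses the list in place; because _flat is a locally built ArrayList that is immediately re-read, the two are interchangeable here. A sketch of an alternative that skips the reversal entirely by sorting descending (counts is a stand-in for the source map):

    // Sort entries by value in descending order in one step.
    List<Map.Entry<String, Integer>> entries = new ArrayList<>(counts.entrySet());
    entries.sort(Map.Entry.comparingByValue(Comparator.reverseOrder()));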
@@ -20,6 +20,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;

@@ -52,8 +53,6 @@ import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.eclipse.jetty.util.StringUtil;

import com.google.common.collect.Sets;

@EventDriven
@SideEffectFree
@SupportsBatching
@@ -248,8 +247,8 @@ public class LogAttribute extends AbstractProcessor {
        // collect properties
        final String attrsToLogValue = context.getProperty(ATTRIBUTES_TO_LOG_CSV).getValue();
        final String attrsToRemoveValue = context.getProperty(ATTRIBUTES_TO_IGNORE_CSV).getValue();
        final Set<String> attrsToLog = StringUtils.isBlank(attrsToLogValue) ? Sets.newHashSet(flowFileAttrKeys) : Sets.newHashSet(attrsToLogValue.split("\\s*,\\s*"));
        final Set<String> attrsToRemove = StringUtils.isBlank(attrsToRemoveValue) ? Sets.newHashSet() : Sets.newHashSet(attrsToRemoveValue.split("\\s*,\\s*"));
        final Set<String> attrsToLog = StringUtils.isBlank(attrsToLogValue) ? new HashSet<>(flowFileAttrKeys) : new HashSet<>(Arrays.asList(attrsToLogValue.split("\\s*,\\s*")));
        final Set<String> attrsToRemove = StringUtils.isBlank(attrsToRemoveValue) ? new HashSet<>() : new HashSet<>(Arrays.asList(attrsToRemoveValue.split("\\s*,\\s*")));
        final Pattern attrsToLogRegex = Pattern.compile(context.getProperty(ATTRIBUTES_TO_LOG_REGEX).getValue());
        final String attrsToRemoveRegexValue = context.getProperty(ATTRIBUTES_TO_IGNORE_REGEX).getValue();
        final Pattern attrsToRemoveRegex = attrsToRemoveRegexValue == null ? null : Pattern.compile(context.getProperty(ATTRIBUTES_TO_IGNORE_REGEX).getValue());
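
The JDK replacement above needs Arrays.asList because, unlike Guava's Sets.newHashSet(T...), the HashSet copy constructor accepts a Collection rather than an array. The general pattern, sketched (csv is a stand-in value):

    // Guava:  Set<String> set = Sets.newHashSet(csv.split("\\s*,\\s*"));
    // JDK:    wrap the String[] as a List before copying into the set.
    Set<String> set = new HashSet<>(Arrays.asList(csv.split("\\s*,\\s*")));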

@@ -17,8 +17,7 @@

package org.apache.nifi.processors.standard;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.CacheBuilder;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.DynamicRelationship;
@@ -210,15 +209,13 @@ public class RouteText extends AbstractProcessor {
    private volatile Map<Relationship, PropertyValue> propertyMap = new HashMap<>();
    private volatile Pattern groupingRegex = null;

    @VisibleForTesting
    final static int PATTERNS_CACHE_MAXIMUM_ENTRIES = 1024;

    /**
     * LRU cache for the compiled patterns. The size of the cache is determined by the value of
     * {@link #PATTERNS_CACHE_MAXIMUM_ENTRIES}.
     */
    @VisibleForTesting
    final ConcurrentMap<String, Pattern> patternsCache = CacheBuilder.newBuilder()
    final ConcurrentMap<String, Pattern> patternsCache = Caffeine.newBuilder()
            .maximumSize(PATTERNS_CACHE_MAXIMUM_ENTRIES)
            .<String, Pattern>build()
            .asMap();
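
The Caffeine builder mirrors the Guava one, and asMap() again exposes the size-bounded cache as a plain ConcurrentMap. A self-contained sketch of how such a pattern cache behaves (the values are illustrative):

    import com.github.benmanes.caffeine.cache.Caffeine;
    import java.util.concurrent.ConcurrentMap;
    import java.util.regex.Pattern;

    ConcurrentMap<String, Pattern> patterns = Caffeine.newBuilder()
            .maximumSize(1024)              // size-based eviction, as in RouteText
            .<String, Pattern>build()
            .asMap();

    // Each distinct expression is compiled once and then served from the cache.
    Pattern p = patterns.computeIfAbsent("\\d+", Pattern::compile);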
@@ -16,9 +16,8 @@
 */
package org.apache.nifi.processors.standard;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.EventDriven;
@@ -261,18 +260,12 @@ public class TransformXml extends AbstractProcessor {
        final Long cacheTTL = context.getProperty(CACHE_TTL_AFTER_LAST_ACCESS).asTimePeriod(TimeUnit.SECONDS);

        if (cacheSize > 0) {
            CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder().maximumSize(cacheSize);
            final Caffeine<Object, Object> cacheBuilder = Caffeine.newBuilder().maximumSize(cacheSize);
            if (cacheTTL > 0) {
                cacheBuilder.expireAfterAccess(cacheTTL, TimeUnit.SECONDS);
            }

            cache = cacheBuilder.build(
                new CacheLoader<String, Templates>() {
                    @Override
                    public Templates load(final String path) throws TransformerConfigurationException, LookupFailureException {
                        return newTemplates(context, path);
                    }
                });
            cache = cacheBuilder.build(path -> newTemplates(context, path));
        } else {
            cache = null;
            logger.info("Stylesheet cache disabled because cache size is set to 0");
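
With Caffeine the loader collapses into a lambda, as the replacement line shows. A standalone sketch of the resulting size- and TTL-bounded LoadingCache (newTemplates, context, cacheSize and cacheTTL stand in for the processor's own fields and helpers):

    import com.github.benmanes.caffeine.cache.Caffeine;
    import com.github.benmanes.caffeine.cache.LoadingCache;
    import java.util.concurrent.TimeUnit;

    LoadingCache<String, Templates> cache = Caffeine.newBuilder()
            .maximumSize(cacheSize)
            .expireAfterAccess(cacheTTL, TimeUnit.SECONDS)
            .build(path -> newTemplates(context, path)); // CacheLoader as a lambda

    Templates templates = cache.get(path); // compiled on first access, cached afterwards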

@@ -16,7 +16,6 @@
 */
package org.apache.nifi.processors.standard.db.impl;

import com.google.common.base.Preconditions;
import org.apache.nifi.util.StringUtils;

import java.util.ArrayList;
@@ -67,9 +66,15 @@ public class MySQLDatabaseAdapter extends GenericDatabaseAdapter {

    @Override
    public String getUpsertStatement(String table, List<String> columnNames, Collection<String> uniqueKeyColumnNames) {
        Preconditions.checkArgument(!StringUtils.isEmpty(table), "Table name cannot be null or blank");
        Preconditions.checkArgument(columnNames != null && !columnNames.isEmpty(), "Column names cannot be null or empty");
        Preconditions.checkArgument(uniqueKeyColumnNames != null && !uniqueKeyColumnNames.isEmpty(), "Key column names cannot be null or empty");
        if (StringUtils.isEmpty(table)) {
            throw new IllegalArgumentException("Table name cannot be null or blank");
        }
        if (columnNames == null || columnNames.isEmpty()) {
            throw new IllegalArgumentException("Column names cannot be null or empty");
        }
        if (uniqueKeyColumnNames == null || uniqueKeyColumnNames.isEmpty()) {
            throw new IllegalArgumentException("Key column names cannot be null or empty");
        }

        String columns = columnNames.stream()
                .collect(Collectors.joining(", "));
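
Preconditions.checkArgument(condition, message) throws IllegalArgumentException when the condition is false, so the expanded if/throw blocks are behavior-preserving; the same rewrite recurs in the other adapter methods below. If the one-line call-site style were still wanted without Guava, a small local helper would do (hypothetical, not part of this commit):

    private static void checkArgument(boolean expression, String errorMessage) {
        if (!expression) {
            throw new IllegalArgumentException(errorMessage);
        }
    }

    // Usage, matching the original call sites:
    checkArgument(!StringUtils.isEmpty(table), "Table name cannot be null or blank");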

@@ -96,9 +101,15 @@ public class MySQLDatabaseAdapter extends GenericDatabaseAdapter {

    @Override
    public String getInsertIgnoreStatement(String table, List<String> columnNames, Collection<String> uniqueKeyColumnNames) {
        Preconditions.checkArgument(!StringUtils.isEmpty(table), "Table name cannot be null or blank");
        Preconditions.checkArgument(columnNames != null && !columnNames.isEmpty(), "Column names cannot be null or empty");
        Preconditions.checkArgument(uniqueKeyColumnNames != null && !uniqueKeyColumnNames.isEmpty(), "Key column names cannot be null or empty");
        if (StringUtils.isEmpty(table)) {
            throw new IllegalArgumentException("Table name cannot be null or blank");
        }
        if (columnNames == null || columnNames.isEmpty()) {
            throw new IllegalArgumentException("Column names cannot be null or empty");
        }
        if (uniqueKeyColumnNames == null || uniqueKeyColumnNames.isEmpty()) {
            throw new IllegalArgumentException("Key column names cannot be null or empty");
        }

        String columns = columnNames.stream()
                .collect(Collectors.joining(", "));
@@ -16,7 +16,6 @@
 */
package org.apache.nifi.processors.standard.db.impl;

import com.google.common.base.Preconditions;
import org.apache.nifi.util.StringUtils;

import java.util.Collection;
@@ -46,9 +45,15 @@ public class PostgreSQLDatabaseAdapter extends GenericDatabaseAdapter {

    @Override
    public String getUpsertStatement(String table, List<String> columnNames, Collection<String> uniqueKeyColumnNames) {
        Preconditions.checkArgument(!StringUtils.isEmpty(table), "Table name cannot be null or blank");
        Preconditions.checkArgument(columnNames != null && !columnNames.isEmpty(), "Column names cannot be null or empty");
        Preconditions.checkArgument(uniqueKeyColumnNames != null && !uniqueKeyColumnNames.isEmpty(), "Key column names cannot be null or empty");
        if (StringUtils.isEmpty(table)) {
            throw new IllegalArgumentException("Table name cannot be null or blank");
        }
        if (columnNames == null || columnNames.isEmpty()) {
            throw new IllegalArgumentException("Column names cannot be null or empty");
        }
        if (uniqueKeyColumnNames == null || uniqueKeyColumnNames.isEmpty()) {
            throw new IllegalArgumentException("Key column names cannot be null or empty");
        }

        String columns = columnNames.stream()
                .collect(Collectors.joining(", "));
@@ -80,9 +85,15 @@ public class PostgreSQLDatabaseAdapter extends GenericDatabaseAdapter {

    @Override
    public String getInsertIgnoreStatement(String table, List<String> columnNames, Collection<String> uniqueKeyColumnNames) {
        Preconditions.checkArgument(!StringUtils.isEmpty(table), "Table name cannot be null or blank");
        Preconditions.checkArgument(columnNames != null && !columnNames.isEmpty(), "Column names cannot be null or empty");
        Preconditions.checkArgument(uniqueKeyColumnNames != null && !uniqueKeyColumnNames.isEmpty(), "Key column names cannot be null or empty");
        if (StringUtils.isEmpty(table)) {
            throw new IllegalArgumentException("Table name cannot be null or blank");
        }
        if (columnNames == null || columnNames.isEmpty()) {
            throw new IllegalArgumentException("Column names cannot be null or empty");
        }
        if (uniqueKeyColumnNames == null || uniqueKeyColumnNames.isEmpty()) {
            throw new IllegalArgumentException("Key column names cannot be null or empty");
        }

        String columns = columnNames.stream()
                .collect(Collectors.joining(", "));
@@ -16,8 +16,6 @@
 */
package org.apache.nifi.processors.standard.servlets;

import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.flowfile.FlowFile;
@@ -63,10 +61,12 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
@@ -241,7 +241,7 @@ public class ListenHTTPServlet extends HttpServlet {
        }

        Set<FlowFile> flowFileSet;
        if (!Strings.isNullOrEmpty(request.getContentType()) && request.getContentType().contains("multipart/form-data")) {
        if (StringUtils.isNotBlank(request.getContentType()) && request.getContentType().contains("multipart/form-data")) {
            flowFileSet = handleMultipartRequest(request, session, foundSubject, foundIssuer);
        } else {
            flowFileSet = handleRequest(request, session, foundSubject, foundIssuer, destinationIsLegacyNiFi, contentType, in);
@@ -272,9 +272,11 @@ public class ListenHTTPServlet extends HttpServlet {
        Set<FlowFile> flowFileSet = new HashSet<>();
        String tempDir = System.getProperty("java.io.tmpdir");
        request.setAttribute(Request.MULTIPART_CONFIG_ELEMENT, new MultipartConfigElement(tempDir, multipartRequestMaxSize, multipartRequestMaxSize, multipartReadBufferSize));
        List<Part> requestParts = ImmutableList.copyOf(request.getParts());
        for (int i = 0; i < requestParts.size(); i++) {
            Part part = requestParts.get(i);
        Collection<Part> requestParts = Collections.unmodifiableCollection(request.getParts());
        final Iterator<Part> parts = requestParts.iterator();
        int i = 0;
        while (parts.hasNext()) {
            Part part = parts.next();
            FlowFile flowFile = session.create();
            try (OutputStream flowFileOutputStream = session.write(flowFile)) {
                StreamUtils.copy(part.getInputStream(), flowFileOutputStream);

@@ -282,6 +284,7 @@ public class ListenHTTPServlet extends HttpServlet {
            flowFile = saveRequestDetailsAsAttributes(request, session, foundSubject, foundIssuer, flowFile);
            flowFile = savePartDetailsAsAttributes(session, part, flowFile, i, requestParts.size());
            flowFileSet.add(flowFile);
            i++;
        }
        return flowFileSet;
    }
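
ImmutableList.copyOf gave indexed access to the parts, while Collections.unmodifiableCollection merely wraps the Collection returned by request.getParts(), hence the iterator plus manual counter above. An alternative sketch that keeps the indexed loop by copying into a list (assuming the javax.servlet.http.Part API used here):

    // Copying into an ArrayList preserves the original indexed iteration style.
    List<Part> requestParts = new ArrayList<>(request.getParts());
    for (int i = 0; i < requestParts.size(); i++) {
        Part part = requestParts.get(i);
        // ... create a FlowFile per part, as above ...
    }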
@@ -35,7 +35,6 @@ import java.util.Map;
import java.util.function.Supplier;
import java.util.regex.Pattern;

import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.apache.commons.net.ftp.FTPReply;

@@ -528,7 +527,6 @@ public class FTPTransfer implements FileTransfer {
        }
    }

    @VisibleForTesting
    protected FTPClient createClient(final PropertyContext context, final Map<String, String> attributes) {
        return FTP_CLIENT_PROVIDER.getClient(context, attributes);
    }
|
|||
|
||||
package org.apache.nifi.processors.standard;
|
||||
|
||||
import com.google.common.base.Splitter;
|
||||
import org.apache.nifi.flowfile.attributes.CoreAttributes;
|
||||
import org.apache.nifi.util.MockFlowFile;
|
||||
import org.apache.nifi.util.TestRunner;
|
||||
|
@ -25,6 +24,7 @@ import org.apache.nifi.util.TestRunners;
|
|||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.HashMap;
|
||||
import java.util.Set;
|
||||
|
@@ -815,7 +815,7 @@ public class TestAttributesToCSV {
        assertEquals(contentDataString.split(newline)[1], "Malibu Beach,\"California, US\"," + path + "," + filename + "," + uuid);
    }

    private List<String> getStrings(String sdata) {
        return Splitter.on(Pattern.compile(SPLIT_REGEX)).splitToList(sdata);
        return Arrays.asList(Pattern.compile(SPLIT_REGEX).split(sdata));
    }

}
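
One behavioral nuance of this substitution: Guava's Splitter keeps trailing empty strings, while Pattern.split(CharSequence) drops them by default. If that detail mattered for SPLIT_REGEX, a negative limit restores it; a sketch:

    // Default split drops trailing empty strings:
    Pattern.compile(",").split("a,b,,");     // ["a", "b"]
    // A negative limit keeps them, closer to Splitter's default behavior:
    Pattern.compile(",").split("a,b,,", -1); // ["a", "b", "", ""]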
@@ -20,6 +20,7 @@ import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsNot.not;

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.nifi.processor.ProcessContext;

@@ -30,8 +31,6 @@ import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

import com.google.common.collect.Maps;

public class TestLogAttribute {

    @Test
@@ -44,7 +43,7 @@ public class TestLogAttribute {

        runner.setProperty(LogAttribute.ATTRIBUTES_TO_LOG_CSV, "foo, bar");

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");

@@ -67,7 +66,7 @@ public class TestLogAttribute {

        runner.setProperty(LogAttribute.ATTRIBUTES_TO_LOG_REGEX, "foo.*");

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");

@@ -92,7 +91,7 @@ public class TestLogAttribute {
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_LOG_CSV, "foo, bar");
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_LOG_REGEX, "foo*");

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");

@@ -115,7 +114,7 @@ public class TestLogAttribute {

        runner.setProperty(LogAttribute.ATTRIBUTES_TO_IGNORE_CSV, "bar");

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");

@@ -138,7 +137,7 @@ public class TestLogAttribute {

        runner.setProperty(LogAttribute.ATTRIBUTES_TO_IGNORE_REGEX, "foo.*");

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");

@@ -163,7 +162,7 @@ public class TestLogAttribute {
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_IGNORE_CSV, "foo,bar");
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_IGNORE_REGEX, "foo.*");

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");

@@ -188,7 +187,7 @@ public class TestLogAttribute {
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_LOG_CSV, "foo");
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_IGNORE_REGEX, "foo.*");

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");

@@ -213,7 +212,7 @@ public class TestLogAttribute {
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_LOG_CSV, "foo,foobaz");
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_IGNORE_CSV, "foobaz");

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");

@@ -237,7 +236,7 @@ public class TestLogAttribute {
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_LOG_REGEX, "foo.*"); // includes foo,foobaz
        runner.setProperty(LogAttribute.ATTRIBUTES_TO_IGNORE_REGEX, "foobaz.*"); // includes foobaz

        final Map<String,String> attrs = Maps.newHashMap();
        final Map<String,String> attrs = new LinkedHashMap<>();
        attrs.put("foo", "foo-value");
        attrs.put("bar", "bar-value");
        attrs.put("foobaz", "foobaz-value");
@@ -24,11 +24,11 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import com.google.common.collect.ImmutableMap;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
@@ -771,8 +771,8 @@ public class TestRouteText {
        runner.setProperty(RouteText.MATCH_STRATEGY, RouteText.MATCHES_REGULAR_EXPRESSION);
        runner.setProperty("simple", ".*(${someValue}).*");

        runner.enqueue("some text", ImmutableMap.of("someValue", "a value"));
        runner.enqueue("some other text", ImmutableMap.of("someValue", "a value"));
        runner.enqueue("some text", Collections.singletonMap("someValue", "a value"));
        runner.enqueue("some other text", Collections.singletonMap("someValue", "a value"));
        runner.run(2);

        assertEquals("Expected 1 elements in the cache for the patterns, got" +

@@ -781,7 +781,7 @@ public class TestRouteText {
        for (int i = 0; i < RouteText.PATTERNS_CACHE_MAXIMUM_ENTRIES * 2; ++i) {
            String iString = Long.toString(i);
            runner.enqueue("some text with " + iString + "in it",
                ImmutableMap.of("someValue", iString));
                Collections.singletonMap("someValue", iString));
            runner.run();
        }
@@ -275,11 +275,6 @@
            <artifactId>bcrypt</artifactId>
            <version>0.9.0</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>28.0-jre</version>
        </dependency>
        <dependency>
            <groupId>org.xerial.snappy</groupId>
            <artifactId>snappy-java</artifactId>
@@ -16,7 +16,6 @@
 */
package org.apache.nifi.schemaregistry.hortonworks;

import com.google.common.collect.ImmutableMap;
import com.hortonworks.registries.schemaregistry.SchemaIdVersion;
import com.hortonworks.registries.schemaregistry.SchemaMetadata;
import com.hortonworks.registries.schemaregistry.SchemaMetadataInfo;

@@ -50,6 +49,7 @@ import org.apache.nifi.util.Tuple;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
@@ -242,7 +242,7 @@ public class HortonworksSchemaRegistry extends AbstractControllerService impleme

    private Map<String, String> buildSslProperties(final ConfigurationContext context) {
        final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
        ImmutableMap.Builder<String, String> propertiesBuilder = ImmutableMap.builder();
        final Map<String, String> propertiesBuilder = new HashMap<>();
        if (sslContextService != null) {
            propertiesBuilder.put("protocol", sslContextService.getSslAlgorithm());
            if (sslContextService.isKeyStoreConfigured()) {

@@ -259,7 +259,7 @@ public class HortonworksSchemaRegistry extends AbstractControllerService impleme
                propertiesBuilder.put("trustStoreType", sslContextService.getTrustStoreType());
            }
        }
        return propertiesBuilder.build();
        return Collections.unmodifiableMap(propertiesBuilder);
    }

    @OnDisabled
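
Unlike ImmutableMap.Builder.build(), which copies its entries, Collections.unmodifiableMap returns a read-only view of the underlying HashMap; that is safe here because the local map never escapes the method by any other path. The shape of the pattern, as a sketch:

    private Map<String, String> buildProperties() {
        Map<String, String> properties = new HashMap<>();
        properties.put("protocol", "TLS"); // illustrative entry
        // The wrapper is effectively immutable to callers because the
        // backing HashMap is not retained anywhere else.
        return Collections.unmodifiableMap(properties);
    }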
@@ -124,11 +124,6 @@
            <artifactId>validation-api</artifactId>
            <version>2.0.1.Final</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>27.0.1-jre</version>
        </dependency>
        <dependency>
            <groupId>com.fluenda</groupId>
            <artifactId>parcefone</artifactId>
@@ -24,8 +24,6 @@ import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordSchema;

import com.google.common.base.Throwables;

import java.io.IOException;
import java.util.Map;

@@ -49,7 +47,7 @@ public abstract class AvroRecordReader implements RecordReader {
        } catch (MalformedRecordException e) {
            throw e;
        } catch (Exception e) {
            throw new MalformedRecordException("Error while getting next record. Root cause: " + Throwables.getRootCause(e), e);
            throw new MalformedRecordException("Error while getting next record", e);
        }
    }
}
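
Dropping Throwables.getRootCause shortens the message, but the full cause chain still travels with the MalformedRecordException via its cause argument. If the root-cause text were still wanted, a plain-JDK walk is short (a sketch; unlike Guava's version it does no cycle detection):

    private static Throwable getRootCause(Throwable t) {
        Throwable cause = t;
        while (cause.getCause() != null) {
            cause = cause.getCause();
        }
        return cause;
    }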
@@ -42,9 +42,6 @@ import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;

import com.google.common.base.Throwables;


public class CSVRecordReader extends AbstractCSVRecordReader {
    private final CSVParser csvParser;

@@ -116,7 +113,7 @@ public class CSVRecordReader extends AbstractCSVRecordReader {
                return new MapRecord(schema, values, coerceTypes, dropUnknownFields);
            }
        } catch (Exception e) {
            throw new MalformedRecordException("Error while getting next record. Root cause: " + Throwables.getRootCause(e), e);
            throw new MalformedRecordException("Error while getting next record", e);
        }

        return null;
@@ -16,7 +16,6 @@
 */
package org.apache.nifi.schema.inference;

import avro.shaded.com.google.common.annotations.VisibleForTesting;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.nifi.annotation.documentation.CapabilityDescription;

@@ -112,7 +111,6 @@ public class VolatileSchemaCache extends AbstractControllerService implements Re
        return Optional.ofNullable(cachedSchema);
    }

    @VisibleForTesting
    protected String createIdentifier(final RecordSchema schema) {
        final MessageDigest digest;
        try {
@@ -28,8 +28,6 @@ import org.apache.nifi.serialization.record.RecordSchema;
import org.junit.Test;
import org.mockito.Mockito;

import com.google.common.base.Throwables;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

@@ -101,7 +99,6 @@ public class ITApacheCSVRecordReader {
            while (reader.nextRecord() != null) {}
        } catch (Exception e) {
            assertThat(e, instanceOf(MalformedRecordException.class));
            assertThat(Throwables.getRootCause(e), instanceOf(IOException.class));
        }
    }
}
@@ -16,7 +16,6 @@
 */
package org.apache.nifi.schema.inference;

import com.google.common.collect.Sets;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.DataType;
import org.apache.nifi.serialization.record.RecordField;

@@ -31,7 +30,6 @@ import java.util.List;
import java.util.Set;
import java.util.function.BiFunction;

import static com.google.common.collect.Collections2.permutations;
import static org.junit.Assert.assertEquals;

public class TestFieldTypeInference {
@@ -66,10 +64,10 @@ public class TestFieldTypeInference {
                RecordFieldType.FLOAT.getDataType()
        );

        Set<DataType> expected = Sets.newHashSet(
        Set<DataType> expected = new HashSet<>(Arrays.asList(
                RecordFieldType.INT.getDataType(),
                RecordFieldType.FLOAT.getDataType()
        );
        ));

        // WHEN
        // THEN

@@ -85,10 +83,10 @@ public class TestFieldTypeInference {
        );


        Set<DataType> expected = Sets.newHashSet(
        Set<DataType> expected = new HashSet<>(Arrays.asList(
                RecordFieldType.INT.getDataType(),
                RecordFieldType.STRING.getDataType()
        );
        ));

        // WHEN
        // THEN

@@ -104,11 +102,11 @@ public class TestFieldTypeInference {
                RecordFieldType.STRING.getDataType()
        );

        Set<DataType> expected = Sets.newHashSet(
        Set<DataType> expected = new HashSet<>(Arrays.asList(
                RecordFieldType.INT.getDataType(),
                RecordFieldType.FLOAT.getDataType(),
                RecordFieldType.STRING.getDataType()
        );
        ));

        // WHEN
        // THEN
@@ -200,7 +198,7 @@ public class TestFieldTypeInference {
    }

    private <I, E> void runWithAllPermutations(BiFunction<List<I>, E, ?> test, List<I> input, E expected) {
        permutations(input).forEach(inputPermutation -> test.apply(inputPermutation, expected));
        test.apply(input, expected);
    }

    private Void testToDataTypeShouldReturnChoice(List<DataType> dataTypes, Set<DataType> expected) {
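
Note that the JDK has no drop-in replacement for Guava's Collections2.permutations, so runWithAllPermutations now exercises only the original input ordering. If full permutation coverage were still desired, a small recursive generator would suffice (a sketch; n! results, acceptable for the short lists used in this test):

    static <T> List<List<T>> permutations(List<T> input) {
        List<List<T>> result = new ArrayList<>();
        if (input.isEmpty()) {
            result.add(new ArrayList<>());
            return result;
        }
        for (int i = 0; i < input.size(); i++) {
            List<T> rest = new ArrayList<>(input);  // copy, then pull out one element
            T head = rest.remove(i);
            for (List<T> perm : permutations(rest)) {
                perm.add(0, head);                  // prepend the chosen element
                result.add(perm);
            }
        }
        return result;
    }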