NIFI-12142: This closes #7806. Deleted many methods, classes, and references that were deprecated.

Signed-off-by: Joseph Witt <joewitt@apache.org>
This commit is contained in:
Mark Payne 2023-09-27 13:06:30 -04:00 committed by Joseph Witt
parent acd9b5b10b
commit db727aa419
No known key found for this signature in database
GPG Key ID: 9093BF854F811A1A
278 changed files with 4415 additions and 7419 deletions

View File

@ -17,8 +17,6 @@
package org.apache.nifi.c2.protocol.api;
import static java.lang.String.format;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
@ -26,6 +24,8 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
import static java.lang.String.format;
@ApiModel
public class C2Operation implements Serializable {
private static final long serialVersionUID = 1L;
@ -36,7 +36,7 @@ public class C2Operation implements Serializable {
private Map<String, String> args;
private Set<String> dependencies;
@ApiModelProperty(value = "A unique identifier for the operation", readOnly = true)
@ApiModelProperty(value = "A unique identifier for the operation", accessMode = ApiModelProperty.AccessMode.READ_ONLY)
public String getIdentifier() {
return identifier;
}

View File

@ -43,7 +43,7 @@ public class Operation extends C2Operation {
@ApiModelProperty(
value = "The current state of the operation",
readOnly = true)
accessMode = ApiModelProperty.AccessMode.READ_ONLY)
public OperationState getState() {
return state;
}
@ -62,8 +62,8 @@ public class Operation extends C2Operation {
}
@ApiModelProperty(value = "The verified identity of the C2 client that created the operation",
readOnly = true,
notes = "This field is set by the server when an operation request is submitted to identify the origin. " +
accessMode = ApiModelProperty.AccessMode.READ_ONLY,
notes = "This field is set by the server when an operation request is submitted to identify the origin. " +
"When the C2 instance is secured, this is the client principal identity (e.g., certificate DN). " +
"When the C2 instances is unsecured, this will be 'anonymous' as client identity can not be authenticated.")
public String getCreatedBy() {

View File

@ -18,11 +18,10 @@ package org.apache.nifi.c2.protocol.component.api;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.apache.nifi.expression.ExpressionLanguageScope;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import org.apache.nifi.expression.ExpressionLanguageScope;
@ApiModel
@ -118,7 +117,7 @@ public class PropertyDescriptor implements Serializable {
this.expressionLanguageScopeDescription = expressionLanguageScope == null ? null : expressionLanguageScope.getDescription();
}
@ApiModelProperty(value = "The description of the expression language scope supported by this property", readOnly = true)
@ApiModelProperty(value = "The description of the expression language scope supported by this property", accessMode = ApiModelProperty.AccessMode.READ_ONLY)
public String getExpressionLanguageScopeDescription() {
return expressionLanguageScope == null ? null : expressionLanguageScope.getDescription();
}

View File

@ -17,10 +17,6 @@
package org.apache.nifi.minifi.bootstrap.configuration;
import static java.util.Optional.ofNullable;
import static java.util.function.Predicate.not;
import static java.util.stream.Collectors.toList;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
@ -37,6 +33,10 @@ import org.apache.nifi.minifi.bootstrap.util.ByteBufferInputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Optional.ofNullable;
import static java.util.function.Predicate.not;
import static java.util.stream.Collectors.toList;
public class ConfigurationChangeCoordinator implements Closeable, ConfigurationChangeNotifier {
public static final String NOTIFIER_INGESTORS_KEY = "nifi.minifi.notifier.ingestors";
@ -118,11 +118,11 @@ public class ConfigurationChangeCoordinator implements Closeable, ConfigurationC
private void instantiateIngestor(Properties bootstrapProperties, String ingestorClassname) {
try {
Class<?> ingestorClass = Class.forName(ingestorClassname);
ChangeIngestor changeIngestor = (ChangeIngestor) ingestorClass.newInstance();
ChangeIngestor changeIngestor = (ChangeIngestor) ingestorClass.getDeclaredConstructor().newInstance();
changeIngestor.initialize(bootstrapProperties, runMiNiFi, this);
changeIngestors.add(changeIngestor);
LOGGER.info("Initialized ingestor: {}", ingestorClassname);
} catch (Exception e) {
} catch (final Exception e) {
LOGGER.error("Instantiating [{}] ingestor failed", ingestorClassname, e);
}
}

View File

@ -17,8 +17,7 @@
package org.apache.nifi.minifi.bootstrap.service;
import static org.apache.nifi.minifi.commons.api.MiNiFiProperties.NIFI_MINIFI_STATUS_REPORTER_COMPONENTS;
import java.lang.reflect.InvocationTargetException;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
@ -33,6 +32,8 @@ import org.apache.nifi.minifi.commons.status.FlowStatusReport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.nifi.minifi.commons.api.MiNiFiProperties.NIFI_MINIFI_STATUS_REPORTER_COMPONENTS;
public class PeriodicStatusReporterManager implements QueryableStatusAggregator {
private static final Logger LOGGER = LoggerFactory.getLogger(PeriodicStatusReporterManager.class);
private static final String FLOW_STATUS_REPORT_CMD = "FLOW_STATUS_REPORT";
@ -103,11 +104,11 @@ public class PeriodicStatusReporterManager implements QueryableStatusAggregator
for (String reporterClassname : reportersCsv.split(",")) {
try {
Class<?> reporterClass = Class.forName(reporterClassname);
PeriodicStatusReporter reporter = (PeriodicStatusReporter) reporterClass.newInstance();
PeriodicStatusReporter reporter = (PeriodicStatusReporter) reporterClass.getDeclaredConstructor().newInstance();
reporter.initialize(bootstrapProperties, this);
statusReporters.add(reporter);
LOGGER.debug("Initialized {} notifier", reporterClass.getCanonicalName());
} catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
} catch (InstantiationException | IllegalAccessException | ClassNotFoundException | NoSuchMethodException | InvocationTargetException e) {
throw new RuntimeException("Issue instantiating notifier " + reporterClassname, e);
}
}

View File

@ -17,10 +17,6 @@
package org.apache.nifi.minifi.bootstrap.configuration.ingestors;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collections;
@ -38,6 +34,10 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public abstract class RestChangeIngestorCommonTest {
private static final String testString = "This is a test string.";
@ -84,7 +84,7 @@ public abstract class RestChangeIngestorCommonTest {
Request request = new Request.Builder()
.url(url)
.post(RequestBody.create(MEDIA_TYPE_MARKDOWN, testString))
.post(RequestBody.create(testString, MEDIA_TYPE_MARKDOWN))
.addHeader("charset", "UTF-8")
.build();
@ -110,7 +110,7 @@ public abstract class RestChangeIngestorCommonTest {
Request request = new Request.Builder()
.url(url)
.post(RequestBody.create(MEDIA_TYPE_MARKDOWN, testString))
.post(RequestBody.create(testString, MEDIA_TYPE_MARKDOWN))
.addHeader("charset", "UTF-8")
.build();

View File

@ -17,12 +17,11 @@
package org.apache.nifi.minifi.c2.security.authentication;
import org.springframework.security.authentication.AbstractAuthenticationToken;
import org.springframework.security.core.GrantedAuthority;
import java.security.cert.X509Certificate;
import java.util.Arrays;
import java.util.Collection;
import org.springframework.security.authentication.AbstractAuthenticationToken;
import org.springframework.security.core.GrantedAuthority;
public class X509AuthenticationToken extends AbstractAuthenticationToken {
private final X509Certificate[] x509Certificates;
@ -37,7 +36,7 @@ public class X509AuthenticationToken extends AbstractAuthenticationToken {
super(grantedAuthorities);
this.x509Certificates = Arrays.copyOf(x509Certificates, x509Certificates.length, X509Certificate[].class);
X509Certificate x509Certificate = x509Certificates[0];
this.subjectDn = x509Certificate.getSubjectDN().getName().trim();
this.subjectDn = x509Certificate.getSubjectX500Principal().getName().trim();
}
@Override

View File

@ -17,8 +17,6 @@
package org.apache.nifi.minifi.toolkit.schema.common;
import org.apache.nifi.minifi.toolkit.schema.exception.SchemaInstantiatonException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
@ -31,6 +29,7 @@ import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.nifi.minifi.toolkit.schema.exception.SchemaInstantiatonException;
public abstract class BaseSchema implements Schema {
public static final String IT_WAS_NOT_FOUND_AND_IT_IS_REQUIRED = "it was not found and it is required";
@ -176,8 +175,8 @@ public abstract class BaseSchema implements Schema {
} else {
if(instantiateIfNull) {
try {
return (T) targetClass.newInstance();
} catch (InstantiationException | IllegalAccessException e) {
return (T) targetClass.getDeclaredConstructor().newInstance();
} catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
addValidationIssue(key, wrapperName, "no value was given, and it is supposed to be created with default values as a default, and when attempting to create it the following " +
"exception was thrown:" + e.getMessage());
}

View File

@ -38,9 +38,6 @@ public @interface DynamicProperty {
String name();
@Deprecated
boolean supportsExpressionLanguage() default false;
String value();
String description();

View File

@ -16,6 +16,9 @@
*/
package org.apache.nifi.components;
import java.time.Duration;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.nifi.components.resource.ResourceReference;
import org.apache.nifi.components.resource.ResourceReferences;
import org.apache.nifi.controller.ControllerService;
@ -25,9 +28,6 @@ import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.registry.EnvironmentVariables;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* <p>
* A PropertyValue provides a mechanism whereby the currently configured value
@ -83,6 +83,13 @@ public interface PropertyValue {
*/
Long asTimePeriod(TimeUnit timeUnit);
/**
* Returns the value as a Duration
*
* @return a Duration representing the value, or <code>null</code> if the value is unset
*/
Duration asDuration();
/**
*
* @param dataUnit specifies the DataUnit to convert the data size into

View File

@ -24,18 +24,6 @@ import java.util.Optional;
* Provides a representation of a component's state at some point in time.
*/
public interface StateMap {
/**
* Each time that a component's state is updated, the state is assigned a new version.
* This version can then be used to atomically update state by the backing storage mechanism.
* Though this number is monotonically increasing, it should not be expected to increment always
* from X to X+1. I.e., version numbers may be skipped.
*
* @deprecated This method should be replaced with getStateVersion()
*
* @return the version associated with the state
*/
@Deprecated
long getVersion();
/**
* Get state version is not guaranteed to be numeric, but can be used to compare against an expected version.
@ -43,10 +31,7 @@ public interface StateMap {
*
* @return State version or empty when not known
*/
default Optional<String> getStateVersion() {
final long version = getVersion();
return version == -1 ? Optional.empty() : Optional.of(String.valueOf(version));
}
Optional<String> getStateVersion();
/**
* Returns the value associated with the given key

View File

@ -16,6 +16,19 @@
*/
package org.apache.nifi.documentation.xml;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.DynamicRelationship;
import org.apache.nifi.annotation.behavior.InputRequirement;
@ -36,9 +49,9 @@ import org.apache.nifi.annotation.configuration.DefaultSchedule;
import org.apache.nifi.annotation.configuration.DefaultSettings;
import org.apache.nifi.annotation.documentation.DeprecationNotice;
import org.apache.nifi.annotation.documentation.MultiProcessorUseCase;
import org.apache.nifi.annotation.documentation.ProcessorConfiguration;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.UseCase;
import org.apache.nifi.annotation.documentation.ProcessorConfiguration;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.ConfigurableComponent;
import org.apache.nifi.components.PropertyDependency;
@ -51,20 +64,6 @@ import org.apache.nifi.documentation.ExtensionType;
import org.apache.nifi.documentation.ServiceAPI;
import org.apache.nifi.processor.Relationship;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
/**
* XML-based implementation of DocumentationWriter
*
@ -277,7 +276,6 @@ public class XmlDocumentationWriter extends AbstractDocumentationWriter {
writeTextElement("name", property.name());
writeTextElement("value", property.value());
writeTextElement("description", property.description());
writeBooleanElement("expressionLanguageSupported", property.supportsExpressionLanguage());
writeTextElement("expressionLanguageScope", property.expressionLanguageScope() == null ? null : property.expressionLanguageScope().name());
writeEndElement();

View File

@ -16,6 +16,16 @@
*/
package org.apache.nifi.processor;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.controller.queue.QueueSize;
@ -29,17 +39,6 @@ import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.io.StreamCallback;
import org.apache.nifi.provenance.ProvenanceReporter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.regex.Pattern;
/**
* <p>
* A process session encompasses all the behaviors a processor can perform to
@ -708,39 +707,6 @@ public interface ProcessSession {
*/
InputStream read(FlowFile flowFile);
/**
* Executes the given callback against the contents corresponding to the
* given FlowFile.
*
* <i>Note</i>: The OutputStream provided to the given OutputStreamCallback
* will not be accessible once this method has completed its execution.
*
* @param source flowfile to retrieve content of
* @param allowSessionStreamManagement allow session to hold the stream open for performance reasons
* @param reader that will be called to read the flowfile content
* @throws IllegalStateException if detected that this method is being
* called from within a write callback of another method (i.e., from within the callback
* that is passed to {@link #write(FlowFile, OutputStreamCallback)} or {@link #write(FlowFile, StreamCallback)})
* or has an OutputStream open (via a call to {@link #write(FlowFile)}) in this session and for
* the given FlowFile(s). Said another way, it is not permissible to call this method while writing to
* the same FlowFile.
* @throws FlowFileHandlingException if the given FlowFile is already
* transferred or removed or doesn't belong to this session. Automatic
* rollback will occur.
* @throws MissingFlowFileException if the given FlowFile content cannot be
* found. The FlowFile should no longer be reference, will be internally
* destroyed, and the session is automatically rolled back and what is left
* of the FlowFile is destroyed.
* @throws FlowFileAccessException if some IO problem occurs accessing
* FlowFile content; if an attempt is made to access the InputStream
* provided to the given InputStreamCallback after this method completed its
* execution
*
* @deprecated Restricting the ProcessSession's ability to manage its own streams should not be used. The need for this
* capability was obviated by the introduction of the {@link #migrate(ProcessSession, Collection)} and {@link #migrate(ProcessSession)} methods.
*/
@Deprecated
void read(FlowFile source, boolean allowSessionStreamManagement, InputStreamCallback reader) throws FlowFileAccessException;
/**
* Combines the content of all given source FlowFiles into a single given

View File

@ -16,6 +16,22 @@
*/
package org.apache.nifi.bootstrap;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.nifi.attribute.expression.language.StandardPropertyValue;
import org.apache.nifi.bootstrap.notification.NotificationContext;
import org.apache.nifi.bootstrap.notification.NotificationInitializationContext;
@ -38,23 +54,6 @@ import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
public class NotificationServiceManager {
private static final Logger logger = LoggerFactory.getLogger(NotificationServiceManager.class);
private final Map<String, ConfiguredNotificationService> servicesById = new HashMap<>();
@ -331,7 +330,7 @@ public class NotificationServiceManager {
final Object serviceObject;
try {
serviceObject = clazz.newInstance();
serviceObject = clazz.getDeclaredConstructor().newInstance();
} catch (final Exception e) {
logger.error("Found configuration for Notification Service with ID '{}' and Class '{}' but could not instantiate Notification Service.", serviceId, className);
logger.error("", e);

View File

@ -16,6 +16,9 @@
*/
package org.apache.nifi.attribute.expression.language;
import java.time.Duration;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.resource.ResourceContext;
@ -33,9 +36,6 @@ import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.registry.EnvironmentVariables;
import org.apache.nifi.util.FormatUtils;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class StandardPropertyValue implements PropertyValue {
private final String rawValue;
@ -114,6 +114,11 @@ public class StandardPropertyValue implements PropertyValue {
return (rawValue == null) ? null : FormatUtils.getTimeDuration(rawValue.trim(), timeUnit);
}
@Override
public Duration asDuration() {
return isSet() ? Duration.ofNanos(asTimePeriod(TimeUnit.NANOSECONDS)) : null;
}
@Override
public Double asDataSize(final DataUnit dataUnit) {
return rawValue == null ? null : DataUnit.parseDataSize(rawValue.trim(), dataUnit);

View File

@ -16,6 +16,11 @@
*/
package org.apache.nifi.jetty.configuration.connector.alpn;
import java.net.SocketAddress;
import java.util.List;
import java.util.function.BiFunction;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLSession;
import org.eclipse.jetty.alpn.server.ALPNServerConnection;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.ssl.ALPNProcessor;
@ -24,12 +29,6 @@ import org.eclipse.jetty.io.ssl.SslHandshakeListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLSession;
import java.net.InetSocketAddress;
import java.util.List;
import java.util.function.BiFunction;
/**
* Standard ALPN Processor supporting JDK 1.8.0-251 and higher based on Jetty JDK9ServerALPNProcessor
*/
@ -55,7 +54,7 @@ public class StandardALPNProcessor implements ALPNProcessor.Server, SslHandshake
*/
@Override
public void configure(final SSLEngine sslEngine, final Connection connection) {
logger.debug("Configuring Connection Remote Address [{}]", connection.getEndPoint().getRemoteAddress());
logger.debug("Configuring Connection Remote Address [{}]", connection.getEndPoint().getRemoteSocketAddress());
final ALPNServerConnection serverConnection = (ALPNServerConnection) connection;
final ProtocolSelector protocolSelector = new ProtocolSelector(serverConnection);
sslEngine.setHandshakeApplicationProtocolSelector(protocolSelector);
@ -84,9 +83,9 @@ public class StandardALPNProcessor implements ALPNProcessor.Server, SslHandshake
try {
serverConnection.select(protocols);
protocol = serverConnection.getProtocol();
logger.debug("Connection Remote Address [{}] Application Layer Protocol [{}] selected", serverConnection.getEndPoint().getRemoteAddress(), protocol);
logger.debug("Connection Remote Address [{}] Application Layer Protocol [{}] selected", serverConnection.getEndPoint().getRemoteSocketAddress(), protocol);
} catch (final Throwable e) {
logger.debug("Connection Remote Address [{}] Application Layer Protocols {} not supported", serverConnection.getEndPoint().getRemoteAddress(), protocols);
logger.debug("Connection Remote Address [{}] Application Layer Protocols {} not supported", serverConnection.getEndPoint().getRemoteSocketAddress(), protocols);
}
return protocol;
}
@ -98,7 +97,7 @@ public class StandardALPNProcessor implements ALPNProcessor.Server, SslHandshake
*/
@Override
public void handshakeSucceeded(final Event event) {
final InetSocketAddress remoteAddress = serverConnection.getEndPoint().getRemoteAddress();
final SocketAddress remoteAddress = serverConnection.getEndPoint().getRemoteSocketAddress();
final SSLSession session = event.getSSLEngine().getSession();
logger.debug("Connection Remote Address [{}] Handshake Succeeded [{}] Cipher Suite [{}]", remoteAddress, session.getProtocol(), session.getCipherSuite());
@ -117,7 +116,7 @@ public class StandardALPNProcessor implements ALPNProcessor.Server, SslHandshake
*/
@Override
public void handshakeFailed(final Event event, final Throwable failure) {
logger.debug("Connection Remote Address [{}] Handshake Failed", serverConnection.getEndPoint().getRemoteAddress(), failure);
logger.debug("Connection Remote Address [{}] Handshake Failed", serverConnection.getEndPoint().getRemoteSocketAddress(), failure);
}
}
}

View File

@ -16,10 +16,6 @@
*/
package org.apache.nifi.util;
import org.apache.nifi.properties.ApplicationProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
@ -40,6 +36,9 @@ import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.nifi.properties.ApplicationProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The NiFiProperties class holds all properties which are needed for various
@ -934,13 +933,6 @@ public class NiFiProperties extends ApplicationProperties {
}
}
/**
* @deprecated Use getClusterNodeProtocolCorePoolSize() and getClusterNodeProtocolMaxPoolSize() instead
*/
@Deprecated()
public int getClusterNodeProtocolThreads() {
return getClusterNodeProtocolMaxPoolSize();
}
public int getClusterNodeProtocolMaxPoolSize() {
try {

View File

@ -844,13 +844,6 @@ public interface SiteToSiteClient extends Closeable {
return useCompression;
}
@Override
public String getUrl() {
if (urls != null && urls.size() > 0) {
return urls.iterator().next();
}
return null;
}
@Override
public Set<String> getUrls() {

View File

@ -21,9 +21,7 @@ import java.io.Serializable;
import java.net.InetAddress;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.SSLContext;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.remote.protocol.DataPacket;
@ -32,15 +30,6 @@ import org.apache.nifi.remote.protocol.http.HttpProxy;
public interface SiteToSiteClientConfig extends Serializable {
/**
* @return the configured URL for the remote NiFi instance
* @deprecated This method only returns single URL string even if multiple URLs are set
* for backward compatibility for implementations that does not expect multiple URLs.
* {@link #getUrls()} should be used instead then should support multiple URLs when making requests.
*/
@Deprecated
String getUrl();
/**
* SiteToSite implementations should support multiple URLs when establishing a SiteToSite connection with a remote
* NiFi instance to provide robust connectivity so that it can keep working as long as at least one of

View File

@ -16,17 +16,16 @@
*/
package org.apache.nifi.remote.client.socket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import java.net.Socket;
import java.security.Principal;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.Optional;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Standard implementation attempts to read X.509 certificates from an SSLSocket
@ -61,7 +60,7 @@ public class StandardSocketPeerIdentityProvider implements SocketPeerIdentityPro
logger.warn("Peer Identity not found: Peer Certificates not provided [{}:{}]", peerHost, peerPort);
} else {
final X509Certificate peerCertificate = (X509Certificate) peerCertificates[0];
final Principal subjectDistinguishedName = peerCertificate.getSubjectDN();
final Principal subjectDistinguishedName = peerCertificate.getSubjectX500Principal();
peerIdentity = subjectDistinguishedName.getName();
}
} catch (final SSLPeerUnverifiedException e) {

View File

@ -16,16 +16,6 @@
*/
package org.apache.nifi.remote.util;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_BATCH_COUNT;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_BATCH_DURATION;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_BATCH_SIZE;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_REQUEST_EXPIRATION;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_USE_COMPRESSION;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.LOCATION_HEADER_NAME;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.LOCATION_URI_INTENT_NAME;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.LOCATION_URI_INTENT_VALUE;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonMappingException;
@ -135,6 +125,16 @@ import org.apache.nifi.web.api.entity.TransactionResultEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_BATCH_COUNT;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_BATCH_DURATION;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_BATCH_SIZE;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_REQUEST_EXPIRATION;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.HANDSHAKE_PROPERTY_USE_COMPRESSION;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.LOCATION_HEADER_NAME;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.LOCATION_URI_INTENT_NAME;
import static org.apache.nifi.remote.protocol.http.HttpHeaders.LOCATION_URI_INTENT_VALUE;
public class SiteToSiteRestApiClient implements Closeable {
private static final String EVENT_CATEGORY = "Site-to-Site";
@ -317,7 +317,7 @@ public class SiteToSiteRestApiClient implements Closeable {
try {
final X509Certificate cert = (X509Certificate) certChain[0];
trustedPeerDn = cert.getSubjectDN().getName().trim();
trustedPeerDn = cert.getSubjectX500Principal().getName().trim();
} catch (final RuntimeException e) {
final String msg = "Could not extract subject DN from SSL session peer certificate";
logger.warn(msg);

View File

@ -16,21 +16,20 @@
*/
package org.apache.nifi.remote.client.socket;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import javax.security.auth.x500.X500Principal;
import java.io.IOException;
import java.net.Socket;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.Optional;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import javax.security.auth.x500.X500Principal;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
@ -92,7 +91,7 @@ class StandardSocketPeerIdentityProviderTest {
when(sslSession.getPeerCertificates()).thenReturn(new X509Certificate[]{peerCertificate});
final X500Principal subjectDistinguishedName = new X500Principal(DISTINGUISHED_NAME);
when(peerCertificate.getSubjectDN()).thenReturn(subjectDistinguishedName);
when(peerCertificate.getSubjectX500Principal()).thenReturn(subjectDistinguishedName);
final Optional<String> peerIdentity = provider.getPeerIdentity(sslSocket);

View File

@ -1,43 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.remote.client.socket;
import org.apache.nifi.remote.client.SiteToSiteClient;
import org.apache.nifi.remote.client.SiteToSiteClientConfig;
import org.junit.jupiter.api.Test;
import java.util.LinkedHashSet;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class TestSiteToSiteClient {

    @SuppressWarnings("deprecation")
    @Test
    public void testGetUrlBackwardCompatibility() {
        // Insertion order matters here: the deprecated single-URL accessor is
        // expected to surface the first URL that was configured.
        final Set<String> clusterUrls = new LinkedHashSet<>();
        clusterUrls.add("http://node1:8080/nifi");
        clusterUrls.add("http://node2:8080/nifi");

        final SiteToSiteClientConfig clientConfig = new SiteToSiteClient.Builder()
                .urls(clusterUrls)
                .buildConfig();

        // getUrl() (deprecated) must return the first configured URL, while
        // getUrls() must echo back the full set unchanged.
        assertEquals("http://node1:8080/nifi", clientConfig.getUrl());
        assertEquals(clusterUrls, clientConfig.getUrls());
    }
}

View File

@ -1,42 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
import java.net.InetSocketAddress;
/**
* A service that may be discovered at runtime. A service is defined as having a
* unique case-sensitive service name and a socket address where it is
* available.
*
*/
public interface DiscoverableService {

    /**
     * The service's name. Two services are considered equal if they have the
     * same case sensitive service name.
     *
     * @return the service's name; never null for a valid service
     */
    String getServiceName();

    /**
     * @return the socket address (host and port) at which the service is available
     */
    InetSocketAddress getServiceAddress();
}

View File

@ -1,77 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
import java.net.InetSocketAddress;
import org.apache.commons.lang3.StringUtils;
/**
* A basic implementation of the DiscoverableService interface. Two services are
* considered equal if they have the same case-sensitive service name.
*
*/
public class DiscoverableServiceImpl implements DiscoverableService {

    private final String serviceName;

    private final InetSocketAddress serviceAddress;

    /**
     * @param serviceName the unique, case-sensitive service name; may not be null or blank
     * @param serviceAddress the socket address at which the service is available; may not be null
     * @throws IllegalArgumentException if either argument is missing or blank
     */
    public DiscoverableServiceImpl(final String serviceName, final InetSocketAddress serviceAddress) {
        // String.isBlank() (Java 11+) matches StringUtils.isBlank semantics
        // (null/empty/whitespace-only) without the commons-lang3 dependency.
        if (serviceName == null || serviceName.isBlank()) {
            throw new IllegalArgumentException("Service name may not be null or empty.");
        } else if (serviceAddress == null) {
            throw new IllegalArgumentException("Service address may not be null.");
        }
        this.serviceName = serviceName;
        this.serviceAddress = serviceAddress;
    }

    @Override
    public InetSocketAddress getServiceAddress() {
        return serviceAddress;
    }

    @Override
    public String getServiceName() {
        return serviceName;
    }

    @Override
    public String toString() {
        return String.format("[Discoverable Service: %s available at %s:%d]", serviceName, serviceAddress.getHostName(), serviceAddress.getPort());
    }

    /**
     * Equality is based solely on the case-sensitive service name, per the
     * DiscoverableService contract; the service address is intentionally ignored.
     */
    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof DiscoverableService)) {
            return false;
        }
        final DiscoverableService other = (DiscoverableService) obj;
        // Null-safe name comparison (the constructor guarantees our own name is
        // non-null, but the other instance's contract is not enforced here).
        return serviceName == null ? other.getServiceName() == null : serviceName.equals(other.getServiceName());
    }

    @Override
    public int hashCode() {
        // Hash only the service name, consistent with equals(); same formula as
        // the original implementation so hash values are unchanged.
        int hash = 5;
        hash = 53 * hash + (this.serviceName != null ? this.serviceName.hashCode() : 0);
        return hash;
    }
}

View File

@ -1,98 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
/**
*/
/**
 * Mutable holder for the options applied when a multicast socket is created.
 * The TTL is required (defaulting to subnet scope); every other option is
 * optional and, when left null, the JVM/OS default is used instead.
 */
public final class MulticastConfiguration {

    public static final MulticastTimeToLive DEFAULT_MULTICAST_TTL = MulticastTimeToLive.SAME_SUBNET;

    // required; never null
    private MulticastTimeToLive timeToLive = DEFAULT_MULTICAST_TTL;

    // optional socket options; null means "not configured"
    private Integer socketTimeout;
    private Integer receiveBufferSize;
    private Integer sendBufferSize;
    private Boolean reuseAddress;
    private Integer trafficClass;
    private Boolean loopbackMode;

    public MulticastTimeToLive getTtl() {
        return timeToLive;
    }

    /**
     * @param ttl the multicast scope to use; may not be null
     * @throws NullPointerException if ttl is null
     */
    public void setTtl(final MulticastTimeToLive ttl) {
        if (ttl == null) {
            throw new NullPointerException("Multicast TTL may not be null.");
        }
        this.timeToLive = ttl;
    }

    public Integer getSocketTimeout() {
        return socketTimeout;
    }

    public void setSocketTimeout(Integer socketTimeout) {
        this.socketTimeout = socketTimeout;
    }

    public Integer getReceiveBufferSize() {
        return receiveBufferSize;
    }

    public void setReceiveBufferSize(Integer receiveBufferSize) {
        this.receiveBufferSize = receiveBufferSize;
    }

    public Integer getSendBufferSize() {
        return sendBufferSize;
    }

    public void setSendBufferSize(Integer sendBufferSize) {
        this.sendBufferSize = sendBufferSize;
    }

    public Boolean getReuseAddress() {
        return reuseAddress;
    }

    public void setReuseAddress(Boolean reuseAddress) {
        this.reuseAddress = reuseAddress;
    }

    public Integer getTrafficClass() {
        return trafficClass;
    }

    public void setTrafficClass(Integer trafficClass) {
        this.trafficClass = trafficClass;
    }

    public Boolean getLoopbackMode() {
        return loopbackMode;
    }

    public void setLoopbackMode(Boolean loopbackMode) {
        this.loopbackMode = loopbackMode;
    }
}

View File

@ -1,192 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetSocketAddress;
import java.net.MulticastSocket;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implements a listener for protocol messages sent over multicast. If a message
* is of type MulticastProtocolMessage, then the underlying protocol message is
* passed to the handler. If the receiving handler produces a message response,
* then the message is wrapped with a MulticastProtocolMessage before being sent
* to the originator.
*
*/
public abstract class MulticastListener {

    // constants
    private static final int DEFAULT_SHUTDOWN_LISTENER_SECONDS = 5;
    private static final int DEFAULT_MAX_PACKET_SIZE_BYTES = 512;

    private static final Logger logger = new org.apache.nifi.logging.NiFiLog(LoggerFactory.getLogger(MulticastListener.class));

    // immutable members
    private final int numThreads;
    private final InetSocketAddress multicastAddress;
    private final MulticastConfiguration configuration;

    private volatile ExecutorService executorService; // volatile to guarantee most current value is visible
    private volatile MulticastSocket multicastSocket; // volatile to guarantee most current value is visible

    // tuning knobs read by start()/stop(); not synchronized — presumably set
    // before start() is called (TODO confirm expected usage)
    private int shutdownListenerSeconds = DEFAULT_SHUTDOWN_LISTENER_SECONDS;
    private int maxPacketSizeBytes = DEFAULT_MAX_PACKET_SIZE_BYTES;

    /**
     * @param numThreads number of worker threads used to dispatch received packets; must be positive
     * @param multicastAddress the multicast (Class D) group address and port to join; may not be null
     * @param configuration socket options applied when the multicast socket is created; may not be null
     * @throws IllegalArgumentException if any argument is invalid
     */
    public MulticastListener(
            final int numThreads,
            final InetSocketAddress multicastAddress,
            final MulticastConfiguration configuration) {

        if (numThreads <= 0) {
            throw new IllegalArgumentException("Number of threads may not be less than or equal to zero.");
        } else if (multicastAddress == null) {
            throw new IllegalArgumentException("Multicast address may not be null.");
        } else if (multicastAddress.getAddress().isMulticastAddress() == false) {
            throw new IllegalArgumentException("Multicast group must be a Class D address.");
        } else if (configuration == null) {
            throw new IllegalArgumentException("Multicast configuration may not be null.");
        }

        this.numThreads = numThreads;
        this.multicastAddress = multicastAddress;
        this.configuration = configuration;
    }

    /**
     * Implements the action to perform when a new datagram is received. This
     * class must not close the multicast socket.
     *
     * @param multicastSocket socket
     * @param packet the datagram packet
     */
    public abstract void dispatchRequest(final MulticastSocket multicastSocket, final DatagramPacket packet);

    /**
     * Joins the multicast group and starts a receive loop on a dedicated thread.
     * Each received datagram is handed to a fixed-size worker pool, which invokes
     * {@link #dispatchRequest(MulticastSocket, DatagramPacket)}. Calling start()
     * while already running is a no-op.
     *
     * @throws IOException if the socket cannot be created or the group joined
     */
    public void start() throws IOException {

        if (isRunning()) {
            return;
        }

        multicastSocket = MulticastUtils.createMulticastSocket(multicastAddress.getPort(), configuration);
        multicastSocket.joinGroup(multicastAddress.getAddress());

        executorService = Executors.newFixedThreadPool(numThreads);

        // Capture the current executor/socket in locals so the receive loop keeps
        // using the instances it was started with, even if the volatile fields are
        // later reassigned by another start() call.
        final ExecutorService runnableExecServiceRef = executorService;
        final MulticastSocket runnableMulticastSocketRef = multicastSocket;

        new Thread(new Runnable() {
            @Override
            public void run() {
                // Loop until the captured executor is shut down by stop().
                while (runnableExecServiceRef.isShutdown() == false) {
                    try {
                        // Fresh buffer per datagram: the packet is handed off to a
                        // worker asynchronously, so the buffer cannot be reused.
                        final byte[] buf = new byte[maxPacketSizeBytes];
                        final DatagramPacket packet = new DatagramPacket(buf, maxPacketSizeBytes);
                        runnableMulticastSocketRef.receive(packet);

                        // NOTE(review): the worker is handed the *field* multicastSocket
                        // rather than the captured runnableMulticastSocketRef — confirm
                        // this is intended if start() can ever be invoked twice.
                        runnableExecServiceRef.execute(new Runnable() {
                            @Override
                            public void run() {
                                dispatchRequest(multicastSocket, packet);
                            }
                        });
                    } catch (final SocketException | SocketTimeoutException ste) {
                        /* ignore so that we can accept connections in approximately a non-blocking fashion */
                    } catch (final Exception e) {
                        logger.warn("Cluster protocol receiver encountered exception: " + e, e);
                    }
                }
            }
        }).start();
    }

    /**
     * @return true if the listener has been started and not yet shut down
     */
    public boolean isRunning() {
        return (executorService != null && executorService.isShutdown() == false);
    }

    /**
     * Stops the worker pool — immediately if {@link #getShutdownListenerSeconds()}
     * is non-positive, otherwise waiting up to that many seconds for in-flight
     * work — then leaves the multicast group and closes the socket.
     * Calling stop() while not running is a no-op.
     *
     * @throws IOException if leaving the group or closing the socket fails
     */
    public void stop() throws IOException {

        if (isRunning() == false) {
            return;
        }

        // shutdown executor service
        try {
            if (getShutdownListenerSeconds() <= 0) {
                executorService.shutdownNow();
            } else {
                executorService.shutdown();
            }
            executorService.awaitTermination(getShutdownListenerSeconds(), TimeUnit.SECONDS);
        } catch (final InterruptedException ex) {
            // Preserve the interrupt for callers; shutdown proceeds regardless.
            Thread.currentThread().interrupt();
        } finally {
            if (executorService.isTerminated()) {
                logger.info("Multicast Listener has been terminated successfully.");
            } else {
                logger.warn("Multicast Listener has not terminated properly. There exists an uninterruptable thread that will take an indeterminate amount of time to stop.");
            }
        }

        // shutdown server socket
        if (multicastSocket.isClosed() == false) {
            multicastSocket.leaveGroup(multicastAddress.getAddress());
            multicastSocket.close();
        }
    }

    public int getShutdownListenerSeconds() {
        return shutdownListenerSeconds;
    }

    /**
     * @param shutdownListenerSeconds how long stop() waits for in-flight work;
     * a value less than or equal to zero causes an immediate shutdownNow()
     */
    public void setShutdownListenerSeconds(final int shutdownListenerSeconds) {
        this.shutdownListenerSeconds = shutdownListenerSeconds;
    }

    public int getMaxPacketSizeBytes() {
        return maxPacketSizeBytes;
    }

    /**
     * @param maxPacketSizeBytes receive buffer size per datagram; must be positive
     * @throws IllegalArgumentException if the size is not positive
     */
    public void setMaxPacketSizeBytes(int maxPacketSizeBytes) {
        if (maxPacketSizeBytes <= 0) {
            throw new IllegalArgumentException("Max packet size must be greater than zero bytes.");
        }
        this.maxPacketSizeBytes = maxPacketSizeBytes;
    }

    public MulticastConfiguration getConfiguration() {
        return configuration;
    }

    public InetSocketAddress getMulticastAddress() {
        return multicastAddress;
    }

    public int getNumThreads() {
        return numThreads;
    }
}

View File

@ -1,33 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
import java.net.InetSocketAddress;
/**
* Defines the interface for discovering services based on name. Services are
* expected to be exposed via socket address and port.
*
*/
public interface MulticastServiceDiscovery extends ServiceDiscovery {

    /**
     * @return the multicast group address (host and port) on which service
     * discovery is performed
     */
    InetSocketAddress getMulticastAddress();
}

View File

@ -1,32 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
import java.net.InetSocketAddress;
/**
* Defines the interface for broadcasting a service via multicast.
*
*/
public interface MulticastServicesBroadcaster extends ServicesBroadcaster {

    /**
     * @return the multicast group address (host and port) to which services
     * are broadcast
     */
    InetSocketAddress getMulticastAddress();
}

View File

@ -1,49 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
/**
*/
/**
 * Standard IPv4 multicast time-to-live (TTL) scopes. The TTL bounds how far a
 * multicast datagram may propagate beyond the sending host.
 */
public enum MulticastTimeToLive {

    SAME_HOST(0),
    SAME_SUBNET(1),
    SAME_SITE(32),
    SAME_REGION(64),
    SAME_CONTINENT(128),
    UNRESTRICTED(255);

    private final int ttl;

    MulticastTimeToLive(final int ttl) {
        this.ttl = ttl;
    }

    /**
     * @return the numeric TTL associated with this scope
     */
    public int getTtl() {
        return ttl;
    }

    /**
     * Resolves a numeric TTL to its enum constant. Declared static because the
     * lookup uses no instance state (existing instance-qualified calls remain
     * source-compatible).
     *
     * @param ttl the numeric TTL to look up
     * @return the matching constant, or {@code null} if no constant has the given TTL
     */
    public static MulticastTimeToLive valueOfByTtl(final int ttl) {
        for (final MulticastTimeToLive value : values()) {
            if (value.getTtl() == ttl) {
                return value;
            }
        }
        return null;
    }
}

View File

@ -1,108 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
import java.io.IOException;
import java.net.InetAddress;
import java.net.MulticastSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*/
/**
 * Static helpers for creating {@link MulticastSocket} instances from a
 * {@link MulticastConfiguration} and for closing them without propagating errors.
 */
public final class MulticastUtils {

    private static final Logger logger = new org.apache.nifi.logging.NiFiLog(LoggerFactory.getLogger(MulticastUtils.class));

    private MulticastUtils() {
        // utility class: prevent instantiation
    }

    /**
     * Creates a multicast socket on an ephemeral port, configured from the
     * given configuration.
     *
     * @param config the socket options to apply; may not be null
     * @return the configured socket
     * @throws IOException if the socket could not be created or configured
     */
    public static MulticastSocket createMulticastSocket(final MulticastConfiguration config) throws IOException {
        return createMulticastSocket(0, config);
    }

    /**
     * Creates a multicast socket bound to the given port (or an ephemeral port
     * when {@code port <= 0}) and applies every non-null option from the
     * configuration.
     *
     * @param port the local port to bind, or a value &lt;= 0 for an ephemeral port
     * @param config the socket options to apply; may not be null
     * @return the configured socket
     * @throws IOException if the socket could not be created or configured
     * @throws IllegalArgumentException if config is null
     */
    public static MulticastSocket createMulticastSocket(final int port, final MulticastConfiguration config) throws IOException {
        if (config == null) {
            throw new IllegalArgumentException("Configuration may not be null.");
        }

        final MulticastSocket socket;
        if (port <= 0) {
            socket = new MulticastSocket();
        } else {
            socket = new MulticastSocket(port);
        }
        socket.setTimeToLive(config.getTtl().getTtl());

        // Only override socket options that were explicitly configured; a null
        // value leaves the JVM/OS default in place.
        if (config.getSocketTimeout() != null) {
            socket.setSoTimeout(config.getSocketTimeout());
        }
        if (config.getReuseAddress() != null) {
            socket.setReuseAddress(config.getReuseAddress());
        }
        if (config.getReceiveBufferSize() != null) {
            socket.setReceiveBufferSize(config.getReceiveBufferSize());
        }
        if (config.getSendBufferSize() != null) {
            socket.setSendBufferSize(config.getSendBufferSize());
        }
        if (config.getTrafficClass() != null) {
            socket.setTrafficClass(config.getTrafficClass());
        }
        if (config.getLoopbackMode() != null) {
            socket.setLoopbackMode(config.getLoopbackMode());
        }
        return socket;
    }

    /**
     * Closes the given socket, logging (at debug level) rather than propagating
     * any failure. A null socket is ignored.
     *
     * @param socket the socket to close; may be null
     */
    public static void closeQuietly(final MulticastSocket socket) {
        if (socket == null) {
            return;
        }

        try {
            socket.close();
        } catch (final Exception ex) {
            logger.debug("Failed to close multicast socket due to: " + ex, ex);
        }
    }

    /**
     * Leaves the given multicast group and then closes the socket; a failure in
     * either step is logged at debug level rather than propagated, and leaving
     * the group is attempted even if it will not prevent the close. A null
     * socket is ignored.
     *
     * @param socket the socket to close; may be null
     * @param groupAddress the multicast group to leave before closing
     */
    public static void closeQuietly(final MulticastSocket socket, final InetAddress groupAddress) {
        if (socket == null) {
            return;
        }

        try {
            socket.leaveGroup(groupAddress);
        } catch (final Exception ex) {
            logger.debug("Failed to leave multicast group due to: " + ex, ex);
        }

        try {
            socket.close();
        } catch (final Exception ex) {
            logger.debug("Failed to close multicast socket due to: " + ex, ex);
        }
    }
}

View File

@ -1,30 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
/**
* Defines a generic interface for discovering services.
*
*/
public interface ServiceDiscovery {

    /**
     * @return the discovered service (implementations may return null when no
     * service has been discovered yet — confirm per implementation)
     */
    DiscoverableService getService();
}

View File

@ -1,55 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.io.socket.multicast;
import java.util.Set;
/**
* Defines the interface for broadcasting a collection of services for client
* discovery.
*
*/
public interface ServicesBroadcaster {

    /**
     * @return the delay in milliseconds to wait between successive broadcasts
     */
    int getBroadcastDelayMs();

    /**
     * @return the set of services currently being broadcast
     */
    Set<DiscoverableService> getServices();

    /**
     * Adds the given service to the set of broadcasted services.
     *
     * @param service a service
     * @return true if the service was added to the set; false if a service with
     * the given service name already exists in the set
     */
    boolean addService(DiscoverableService service);

    /**
     * Removes the service with the given service name from the set.
     *
     * @param serviceName a service name
     * @return true if the service was removed; false otherwise
     */
    boolean removeService(String serviceName);
}

View File

@ -16,19 +16,7 @@
*/
package org.apache.nifi.processor.util;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;
import org.apache.nifi.components.resource.ResourceCardinality;
import org.apache.nifi.components.resource.ResourceType;
import org.apache.nifi.expression.AttributeExpression.ResultType;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.util.FormatUtils;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.charset.UnsupportedCharsetException;
@ -39,6 +27,14 @@ import java.util.Arrays;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;
import org.apache.nifi.expression.AttributeExpression.ResultType;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.util.FormatUtils;
public class StandardValidators {
@ -290,9 +286,6 @@ public class StandardValidators {
}
}
};
// Old name retained for compatibility
@Deprecated
public static final Validator ISO8061_INSTANT_VALIDATOR = ISO8601_INSTANT_VALIDATOR;
public static final Validator NON_NEGATIVE_INTEGER_VALIDATOR = new Validator() {
@Override
@ -556,47 +549,6 @@ public class StandardValidators {
};
}
/**
* @deprecated use {@link org.apache.nifi.components.PropertyDescriptor.Builder#identifiesExternalResource(ResourceCardinality, ResourceType, ResourceType...)
* identifiesExternalResource(ResourceCardinality.SINGLE, ResourceType.FILE, ResourceType.DIRECTORY, ResourceType.URL}
* instead.
*/
@Deprecated
public static Validator createURLorFileValidator() {
return (subject, input, context) -> {
if (context.isExpressionLanguageSupported(subject) && context.isExpressionLanguagePresent(input)) {
return new ValidationResult.Builder().subject(subject).input(input).explanation("Expression Language Present").valid(true).build();
}
try {
PropertyValue propertyValue = context.newPropertyValue(input);
String evaluatedInput = (propertyValue == null) ? input : propertyValue.evaluateAttributeExpressions().getValue();
boolean validUrl = true;
// First check to see if it is a valid URL
try {
URI.create(evaluatedInput).toURL();
} catch (IllegalArgumentException | MalformedURLException mue) {
validUrl = false;
}
boolean validFile = true;
if (!validUrl) {
// Check to see if it is a file and it exists
final File file = new File(evaluatedInput);
validFile = file.exists();
}
final boolean valid = validUrl || validFile;
final String reason = valid ? "Valid URL or file" : "Not a valid URL or file";
return new ValidationResult.Builder().subject(subject).input(input).explanation(reason).valid(valid).build();
} catch (final Exception e) {
return new ValidationResult.Builder().subject(subject).input(input).explanation("Not a valid URL or file").valid(false).build();
}
};
}
public static Validator createListValidator(boolean trimEntries, boolean excludeEmptyEntries,
Validator elementValidator) {
@ -688,7 +640,7 @@ public class StandardValidators {
.subject(subject)
.input(input)
.valid(false)
.explanation("Failed to evaluate the Attribute Expression Language due to " + e.toString())
.explanation("Failed to evaluate the Attribute Expression Language due to " + e)
.build();
}
} else {
@ -747,7 +699,7 @@ public class StandardValidators {
.subject(subject)
.input(value)
.valid(false)
.explanation("Failed to evaluate the Attribute Expression Language due to " + e.toString())
.explanation("Failed to evaluate the Attribute Expression Language due to " + e)
.build();
}
} else {
@ -844,8 +796,8 @@ public class StandardValidators {
public TimePeriodValidator(final long minValue, final TimeUnit minTimeUnit, final long maxValue, final TimeUnit maxTimeUnit) {
this.minNanos = TimeUnit.NANOSECONDS.convert(minValue, minTimeUnit);
this.maxNanos = TimeUnit.NANOSECONDS.convert(maxValue, maxTimeUnit);
this.minValueEnglish = minValue + " " + minTimeUnit.toString();
this.maxValueEnglish = maxValue + " " + maxTimeUnit.toString();
this.minValueEnglish = minValue + " " + minTimeUnit;
this.maxValueEnglish = maxValue + " " + maxTimeUnit;
}
@Override

View File

@ -156,13 +156,13 @@ public class FormatUtils {
* Returns a time duration in the requested {@link TimeUnit} after parsing the {@code String}
* input. If the resulting value is a decimal (i.e.
* {@code 25 hours -> TimeUnit.DAYS = 1.04}), the value is rounded.
* Use {@link #getPreciseTimeDuration(String, TimeUnit)} if fractional values are desirable
*
* @param value the raw String input (i.e. "28 minutes")
* @param desiredUnit the requested output {@link TimeUnit}
* @return the whole number value of this duration in the requested units
* @deprecated As of Apache NiFi 1.9.0, because this method only returns whole numbers, use {@link #getPreciseTimeDuration(String, TimeUnit)} when possible.
* @see #getPreciseTimeDuration(String, TimeUnit)
*/
@Deprecated
public static long getTimeDuration(final String value, final TimeUnit desiredUnit) {
return Math.round(getPreciseTimeDuration(value, desiredUnit));
}

View File

@ -16,6 +16,8 @@
*/
package org.apache.nifi.util.validator;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
@ -24,9 +26,6 @@ import org.apache.nifi.processor.util.StandardValidators;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ -466,39 +465,10 @@ public class TestStandardValidators {
assertEquals(2, mockValidator.getValidateCallCount());
}
@Test
public void testCreateURLorFileValidator() {
Validator val = StandardValidators.createURLorFileValidator();
ValidationResult vr;
final ValidationContext validationContext = Mockito.mock(ValidationContext.class);
vr = val.validate("URLorFile", null, validationContext);
assertFalse(vr.isValid());
vr = val.validate("URLorFile", "", validationContext);
assertFalse(vr.isValid());
vr = val.validate("URLorFile", "http://nifi.apache.org", validationContext);
assertTrue(vr.isValid());
vr = val.validate("URLorFile", "http//nifi.apache.org", validationContext);
assertFalse(vr.isValid());
vr = val.validate("URLorFile", "nifi.apache.org", validationContext);
assertFalse(vr.isValid());
vr = val.validate("URLorFile", "src/test/resources/this_file_exists.txt", validationContext);
assertTrue(vr.isValid());
vr = val.validate("URLorFile", "src/test/resources/this_file_does_not_exist.txt", validationContext);
assertFalse(vr.isValid());
}
@Test
public void testiso8061InstantValidator() {
Validator val = StandardValidators.ISO8061_INSTANT_VALIDATOR;
Validator val = StandardValidators.ISO8601_INSTANT_VALIDATOR;
ValidationContext vc = mock(ValidationContext.class);
ValidationResult vr = val.validate("foo", "", vc);
assertFalse(vr.isValid());

View File

@ -16,15 +16,13 @@
*/
package org.apache.nifi.controller.repository;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Set;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
/**
* Defines the capabilities of a content repository. Append options are not
@ -143,24 +141,6 @@ public interface ContentRepository {
*/
ContentClaim clone(ContentClaim original, boolean lossTolerant) throws IOException;
/**
* Creates a new content item that is the merger in iteration order of all
* content for the given claims
*
* @return the size of the destination
* @param claims the claims to merge which will be combined in order of
* collection iteration
* @param destination the claim to write the merged content to
* @param header if supplied will be prepended to the output
* @param footer if supplied will be appended to the output
* @param demarcator if supplied will be placed in between each merged
* object
* @throws IOException if unable to merge
* @throws IllegalArgumentException if the given destination is included in
* the given claims
*/
@Deprecated
long merge(Collection<ContentClaim> claims, ContentClaim destination, byte[] header, byte[] footer, byte[] demarcator) throws IOException;
/**
* Imports content from the given path creating a new content object and

View File

@ -41,21 +41,4 @@ public interface NiFiWebRequestContext {
*/
String getId();
/**
* Returns the proxied entities chain. The format of the chain is as
* follows:
*
* <code>
* &lt;CN=original-proxied-entity&gt;&lt;CN=first-proxy&gt;&lt;CN=second-proxy&gt;...
* </code>
*
* Update:
* This method has been deprecated since the entire proxy
* chain is able to be rebuilt using the current user if necessary.
*
* @return the proxied entities chain or null if no chain
*/
@Deprecated
String getProxiedEntitiesChain();
}

View File

@ -110,14 +110,10 @@ public class MockStateManager implements StateManager {
}
public long getRetrievalCount(final Scope scope) {
switch (scope) {
case CLUSTER:
return clusterRetrievedCount.get();
case LOCAL:
return localRetrievedCount.get();
default:
throw new IllegalArgumentException("Invalid scope: " + scope);
}
return switch (scope) {
case CLUSTER -> clusterRetrievedCount.get();
case LOCAL -> localRetrievedCount.get();
};
}
@Override

View File

@ -20,20 +20,21 @@ package org.apache.nifi.state;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.nifi.components.state.StateMap;
public class MockStateMap implements StateMap {
private final Map<String, String> stateValues;
private final long version;
private final String version;
public MockStateMap(final Map<String, String> stateValues, final long version) {
this.stateValues = stateValues == null ? Collections.emptyMap() : new HashMap<>(stateValues);
this.version = version;
this.version = Long.toString(version);
}
@Override
public long getVersion() {
return version;
public Optional<String> getStateVersion() {
return version.equals("-1") ? Optional.empty() : Optional.of(version);
}
@Override

View File

@ -16,26 +16,6 @@
*/
package org.apache.nifi.util;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.controller.queue.QueueSize;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.FlowFileFilter;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Processor;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.FlowFileAccessException;
import org.apache.nifi.processor.exception.FlowFileHandlingException;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.io.StreamCallback;
import org.apache.nifi.provenance.ProvenanceReporter;
import org.apache.nifi.state.MockStateManager;
import org.junit.jupiter.api.Assertions;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
@ -63,6 +43,25 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.controller.queue.QueueSize;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.FlowFileFilter;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Processor;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.FlowFileAccessException;
import org.apache.nifi.processor.exception.FlowFileHandlingException;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.io.StreamCallback;
import org.apache.nifi.provenance.ProvenanceReporter;
import org.apache.nifi.state.MockStateManager;
import org.junit.jupiter.api.Assertions;
public class MockProcessSession implements ProcessSession {
@ -583,12 +582,7 @@ public class MockProcessSession implements ProcessSession {
}
@Override
public void read(final FlowFile flowFile, final InputStreamCallback callback) {
read(flowFile, false, callback);
}
@Override
public void read(FlowFile flowFile, boolean allowSessionStreamManagement, final InputStreamCallback callback) {
public void read(FlowFile flowFile, final InputStreamCallback callback) {
if (callback == null || flowFile == null) {
throw new IllegalArgumentException("argument cannot be null");
}
@ -603,9 +597,7 @@ public class MockProcessSession implements ProcessSession {
incrementReadCount(flowFile);
try {
callback.process(bais);
if(!allowSessionStreamManagement){
bais.close();
}
bais.close();
} catch (final IOException e) {
throw new ProcessException(e.toString(), e);
} finally {

View File

@ -16,6 +16,11 @@
*/
package org.apache.nifi.util;
import java.time.Duration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.attribute.expression.language.Query;
import org.apache.nifi.attribute.expression.language.Query.Range;
@ -36,11 +41,6 @@ import org.apache.nifi.parameter.ParameterLookup;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.exception.ProcessException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class MockPropertyValue implements PropertyValue {
private final String rawValue;
private final Boolean expectExpressions;
@ -175,6 +175,11 @@ public class MockPropertyValue implements PropertyValue {
return stdPropValue.asTimePeriod(timeUnit);
}
@Override
public Duration asDuration() {
return isSet() ? Duration.ofNanos(asTimePeriod(TimeUnit.NANOSECONDS)) : null;
}
@Override
public Double asDataSize(final DataUnit dataUnit) {
ensureExpressionsEvaluated();

View File

@ -45,39 +45,30 @@ public class ReflectionUtils {
throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
for (final Method method : instance.getClass().getMethods()) {
if (method.isAnnotationPresent(annotation)) {
final boolean isAccessible = method.isAccessible();
method.setAccessible(true);
try {
final Class<?>[] argumentTypes = method.getParameterTypes();
if (argumentTypes.length > args.length) {
throw new IllegalArgumentException(String.format("Unable to invoke method %1$s on %2$s because method expects %3$s parameters but only %4$s were given",
method.getName(), instance, argumentTypes.length, args.length));
}
final Class<?>[] argumentTypes = method.getParameterTypes();
if (argumentTypes.length > args.length) {
throw new IllegalArgumentException(String.format("Unable to invoke method %1$s on %2$s because method expects %3$s parameters but only %4$s were given",
method.getName(), instance, argumentTypes.length, args.length));
}
for (int i = 0; i < argumentTypes.length; i++) {
final Class<?> argType = argumentTypes[i];
if (!argType.isAssignableFrom(args[i].getClass())) {
throw new IllegalArgumentException(String.format(
"Unable to invoke method %1$s on %2$s because method parameter %3$s is expected to be of type %4$s but argument passed was of type %5$s",
method.getName(), instance, i, argType, args[i].getClass()));
}
for (int i = 0; i < argumentTypes.length; i++) {
final Class<?> argType = argumentTypes[i];
if (!argType.isAssignableFrom(args[i].getClass())) {
throw new IllegalArgumentException(String.format(
"Unable to invoke method %1$s on %2$s because method parameter %3$s is expected to be of type %4$s but argument passed was of type %5$s",
method.getName(), instance, i, argType, args[i].getClass()));
}
}
if (argumentTypes.length == args.length) {
method.invoke(instance, args);
} else {
final Object[] argsToPass = new Object[argumentTypes.length];
for (int i = 0; i < argsToPass.length; i++) {
argsToPass[i] = args[i];
}
if (argumentTypes.length == args.length) {
method.invoke(instance, args);
} else {
final Object[] argsToPass = new Object[argumentTypes.length];
System.arraycopy(args, 0, argsToPass, 0, argsToPass.length);
method.invoke(instance, argsToPass);
}
} finally {
if (!isAccessible) {
method.setAccessible(false);
}
method.invoke(instance, argsToPass);
}
}
}
@ -102,46 +93,37 @@ public class ReflectionUtils {
public static boolean quietlyInvokeMethodsWithAnnotation(final Class<? extends Annotation> annotation, final Object instance, final Object... args) {
for (final Method method : instance.getClass().getMethods()) {
if (method.isAnnotationPresent(annotation)) {
final boolean isAccessible = method.isAccessible();
method.setAccessible(true);
final Class<?>[] argumentTypes = method.getParameterTypes();
if (argumentTypes.length > args.length) {
LOG.error("Unable to invoke method {} on {} because method expects {} parameters but only {} were given",
new Object[]{method.getName(), instance, argumentTypes.length, args.length});
return false;
}
for (int i = 0; i < argumentTypes.length; i++) {
final Class<?> argType = argumentTypes[i];
if (!argType.isAssignableFrom(args[i].getClass())) {
LOG.error("Unable to invoke method {} on {} because method parameter {} is expected to be of type {} but argument passed was of type {}",
new Object[]{method.getName(), instance, i, argType, args[i].getClass()});
return false;
}
}
try {
final Class<?>[] argumentTypes = method.getParameterTypes();
if (argumentTypes.length > args.length) {
LOG.error("Unable to invoke method {} on {} because method expects {} parameters but only {} were given",
new Object[]{method.getName(), instance, argumentTypes.length, args.length});
return false;
}
if (argumentTypes.length == args.length) {
method.invoke(instance, args);
} else {
final Object[] argsToPass = new Object[argumentTypes.length];
System.arraycopy(args, 0, argsToPass, 0, argsToPass.length);
for (int i = 0; i < argumentTypes.length; i++) {
final Class<?> argType = argumentTypes[i];
if (!argType.isAssignableFrom(args[i].getClass())) {
LOG.error("Unable to invoke method {} on {} because method parameter {} is expected to be of type {} but argument passed was of type {}",
new Object[]{method.getName(), instance, i, argType, args[i].getClass()});
return false;
}
}
try {
if (argumentTypes.length == args.length) {
method.invoke(instance, args);
} else {
final Object[] argsToPass = new Object[argumentTypes.length];
for (int i = 0; i < argsToPass.length; i++) {
argsToPass[i] = args[i];
}
method.invoke(instance, argsToPass);
}
} catch (final IllegalAccessException | IllegalArgumentException | InvocationTargetException t) {
LOG.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, t});
LOG.error("", t);
return false;
}
} finally {
if (!isAccessible) {
method.setAccessible(false);
method.invoke(instance, argsToPass);
}
} catch (final IllegalAccessException | IllegalArgumentException | InvocationTargetException t) {
LOG.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, t});
LOG.error("", t);
return false;
}
}
}

View File

@ -116,7 +116,7 @@ public class TestRunners {
*/
public static TestRunner newTestRunner(final Class<? extends Processor> processorClass, String name) {
try {
return newTestRunner(processorClass.newInstance(), name);
return newTestRunner(processorClass.getDeclaredConstructor().newInstance(), name);
} catch (final Exception e) {
System.err.println("Could not instantiate instance of class " + processorClass.getName() + " due to: " + e);
throw new RuntimeException(e);
@ -133,7 +133,7 @@ public class TestRunners {
*/
public static TestRunner newTestRunner(final Class<? extends Processor> processorClass, String name, MockComponentLog logger) {
try {
return newTestRunner(processorClass.newInstance(), name, logger);
return newTestRunner(processorClass.getDeclaredConstructor().newInstance(), name, logger);
} catch (final Exception e) {
System.err.println("Could not instantiate instance of class " + processorClass.getName() + " due to: " + e);
throw new RuntimeException(e);

View File

@ -1,42 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.accumulo.controllerservices;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunner;
import java.util.HashMap;
import java.util.Map;
public class MockAccumuloService {
public static AccumuloService getService(final TestRunner runner, final String zk, final String instanceName, final String user, final String password) throws InitializationException {
final AccumuloService accclient = new AccumuloService();
Map<String,String> properties = new HashMap<>();
properties.put(AccumuloService.ACCUMULO_PASSWORD.getName(), password);
properties.put(AccumuloService.AUTHENTICATION_TYPE.getName(), "PASSWORD");
properties.put(AccumuloService.ACCUMULO_USER.getName(), user);
properties.put(AccumuloService.ZOOKEEPER_QUORUM.getName(), zk);
properties.put(AccumuloService.INSTANCE_NAME.getName(), instanceName);
runner.addControllerService("accclient", accclient, properties);
runner.enableControllerService(accclient);
runner.setProperty("accumulo-connector-service","accclient");
return accclient;
}
}

View File

@ -1,225 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.accumulo.processors;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchScanner;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.hadoop.io.Text;
import org.apache.nifi.accumulo.controllerservices.AccumuloService;
import org.apache.nifi.accumulo.controllerservices.MockAccumuloService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.serialization.record.MockRecordParser;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.OS;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
@DisabledOnOs(OS.WINDOWS)
public class PutRecordIT {
public static final String DEFAULT_COLUMN_FAMILY = "family1";
/**
* Though deprecated in 2.0 it still functions very well
*/
private static MiniAccumuloCluster accumulo;
private TestRunner getTestRunner(String table, String columnFamily) {
final TestRunner runner = TestRunners.newTestRunner(PutAccumuloRecord.class);
runner.enforceReadStreamsClosed(false);
runner.setProperty(PutAccumuloRecord.TABLE_NAME, table);
runner.setProperty(PutAccumuloRecord.COLUMN_FAMILY, columnFamily);
return runner;
}
@BeforeAll
public static void setupInstance() throws IOException, InterruptedException, AccumuloSecurityException, AccumuloException, TableExistsException {
Path tempDirectory = Files.createTempDirectory("acc"); // JUnit and Guava supply mechanisms for creating temp directories
accumulo = new MiniAccumuloCluster(tempDirectory.toFile(), "password");
accumulo.start();
}
private Set<Key> generateTestData(TestRunner runner, boolean valueincq, String delim, String cv) throws IOException {
final MockRecordParser parser = new MockRecordParser();
try {
runner.addControllerService("parser", parser);
} catch (InitializationException e) {
throw new IOException(e);
}
runner.enableControllerService(parser);
runner.setProperty(PutAccumuloRecord.RECORD_READER_FACTORY, "parser");
long ts = System.currentTimeMillis();
parser.addSchemaField("id", RecordFieldType.STRING);
parser.addSchemaField("name", RecordFieldType.STRING);
parser.addSchemaField("code", RecordFieldType.STRING);
parser.addSchemaField("timestamp", RecordFieldType.LONG);
Set<Key> expectedKeys = new HashSet<>();
ColumnVisibility colViz = new ColumnVisibility();
if (null != cv)
colViz = new ColumnVisibility(cv);
Random random = new Random();
for (int x = 0; x < 5; x++) {
//final int row = random.nextInt(10000000);
final String row = UUID.randomUUID().toString();
final String cf = UUID.randomUUID().toString();
final String cq = UUID.randomUUID().toString();
Text keyCq = new Text("name");
if (valueincq){
if (null != delim && !delim.isEmpty())
keyCq.append(delim.getBytes(),0,delim.length());
keyCq.append(cf.getBytes(),0,cf.length());
}
expectedKeys.add(new Key(new Text(row), new Text("family1"), keyCq, colViz,ts));
keyCq = new Text("code");
if (valueincq){
if (null != delim && !delim.isEmpty())
keyCq.append(delim.getBytes(),0,delim.length());
keyCq.append(cq.getBytes(),0,cq.length());
}
expectedKeys.add(new Key(new Text(row), new Text("family1"), keyCq, colViz, ts));
parser.addRecord(row, cf, cq, ts);
}
return expectedKeys;
}
void verifyKey(String tableName, Set<Key> expectedKeys, Authorizations auths) throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
if (null == auths)
auths = new Authorizations();
try(BatchScanner scanner = accumulo.createAccumuloClient("root", new PasswordToken("password")).createBatchScanner(tableName,auths,1)) {
List<Range> ranges = new ArrayList<>();
ranges.add(new Range());
scanner.setRanges(ranges);
for (Map.Entry<Key, Value> kv : scanner) {
assertTrue(expectedKeys.remove(kv.getKey()), kv.getKey() + " not in expected keys");
}
}
assertEquals(0, expectedKeys.size());
}
private void basicPutSetup(boolean valueincq) throws Exception {
basicPutSetup(valueincq,null,null,null,false);
}
private void basicPutSetup(boolean valueincq, final String delim) throws Exception {
basicPutSetup(valueincq,delim,null,null,false);
}
private void basicPutSetup(boolean valueincq,String delim, String auths, Authorizations defaultVis, boolean deletes) throws Exception {
String tableName = UUID.randomUUID().toString();
tableName=tableName.replace("-","a");
if (null != defaultVis)
accumulo.createAccumuloClient("root", new PasswordToken("password")).securityOperations().changeUserAuthorizations("root",defaultVis);
TestRunner runner = getTestRunner(tableName, DEFAULT_COLUMN_FAMILY);
runner.setProperty(PutAccumuloRecord.CREATE_TABLE, "True");
runner.setProperty(PutAccumuloRecord.ROW_FIELD_NAME, "id");
runner.setProperty(PutAccumuloRecord.COLUMN_FAMILY, DEFAULT_COLUMN_FAMILY);
runner.setProperty(PutAccumuloRecord.TIMESTAMP_FIELD, "timestamp");
if (valueincq) {
if (null != delim){
runner.setProperty(PutAccumuloRecord.FIELD_DELIMITER, delim);
}
runner.setProperty(PutAccumuloRecord.RECORD_IN_QUALIFIER, "True");
}
if (null != defaultVis){
runner.setProperty(PutAccumuloRecord.DEFAULT_VISIBILITY, auths);
}
AccumuloService client = MockAccumuloService.getService(runner,accumulo.getZooKeepers(),accumulo.getInstanceName(),"root","password");
Set<Key> expectedKeys = generateTestData(runner,valueincq,delim, auths);
runner.enqueue("Test".getBytes("UTF-8")); // This is to coax the processor into reading the data in the reader.l
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(PutAccumuloRecord.REL_SUCCESS);
assertTrue(results.size() == 1, "Wrong count");
verifyKey(tableName, expectedKeys, defaultVis);
if (deletes){
runner.setProperty(PutAccumuloRecord.DELETE_KEY, "true");
runner.enqueue("Test".getBytes("UTF-8")); // This is to coax the processor into reading the data in the reader.l
runner.run();
runner.getFlowFilesForRelationship(PutAccumuloRecord.REL_SUCCESS);
verifyKey(tableName, new HashSet<>(), defaultVis);
}
}
@Test
public void testByteEncodedPut() throws Exception {
basicPutSetup(false);
}
@Test
public void testByteEncodedPutThenDelete() throws Exception {
basicPutSetup(true,null,"A&B",new Authorizations("A","B"),true);
}
@Test
public void testByteEncodedPutCq() throws Exception {
basicPutSetup(true);
}
@Test
public void testByteEncodedPutCqDelim() throws Exception {
basicPutSetup(true,"\u0000");
}
@Test
public void testByteEncodedPutCqWithVis() throws Exception {
basicPutSetup(true,null,"A&B",new Authorizations("A","B"),false);
}
}

View File

@ -1,272 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.accumulo.processors;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.MultiTableBatchWriter;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.hadoop.io.Text;
import org.apache.nifi.accumulo.controllerservices.MockAccumuloService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.serialization.record.MockRecordWriter;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.OS;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.UUID;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
@DisabledOnOs(OS.WINDOWS)
public class ScanAccumuloIT {
public static final String DEFAULT_COLUMN_FAMILY = "family1";
private static final MockRecordWriter PARSER = new MockRecordWriter();
/**
* Though deprecated in 2.0 it still functions very well
*/
private static MiniAccumuloCluster accumulo;
@BeforeAll
public static void setupInstance() throws IOException, InterruptedException {
Path tempDirectory = Files.createTempDirectory("acc"); // JUnit and Guava supply mechanisms for creating temp directories
accumulo = new MiniAccumuloCluster(tempDirectory.toFile(), "password");
accumulo.start();
}
@Test
public void testPullDatWithFlowFile() throws Exception {
TestRunner runner = createTestEnvironment("","","","",false,"",null);
// This is to coax the processor into reading the data in the reader.
runner.enqueue("Test".getBytes(StandardCharsets.UTF_8));
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertRecordCount(results, 5);
}
@Test
public void testPullDatWithOutFlowFile() throws Exception {
TestRunner runner = createTestEnvironment("","","","",false,"",null);
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertRecordCount(results, 5);
}
@Test
public void testSameRowCf() throws Exception {
TestRunner runner = createTestEnvironment("2019","2019","family1","family2",false,"",null);
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertRecordCount(results, 1);
}
@Test
public void testSameRowCfValueInCq() throws Exception {
TestRunner runner = createTestEnvironment("2019","2019","family1","family2",true,"",null);
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertRecordCount(results, 5);
}
@Test
public void testSameRowCfValueInCqWithAuths() throws Exception {
TestRunner runner = createTestEnvironment("2019","2019","family1","family2",true,"abcd",new Authorizations("abcd"));
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertRecordCount(results, 5);
}
@Test
public void testSameRowCfValueInCqErrorCfEnd() {
assertThrows(AssertionError.class, () -> {
TestRunner runner = createTestEnvironment("2019","2019","family1","",true,"",null);
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertRecordCount(results, 5);
});
}
@Test
public void testSameRowCfValueInCqErrorCf() {
assertThrows(AssertionError.class, () -> {
TestRunner runner = createTestEnvironment("2019", "2019", "", "family2", true, "", null);
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertRecordCount(results, 5);
});
}
@Test
public void testSameRowCfValueInCqErrorNotLess() {
assertThrows(AssertionError.class, () -> {
TestRunner runner = createTestEnvironment("2019", "2019", "family1", "family1", true, "", null);
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertRecordCount(results, 5);
});
}
@Test
public void testValueIsPresentByDefault() throws Exception {
TestRunner runner = createTestEnvironment("2019","2019","family1","family2",false,"",null);
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertValueInResult(results, "\"Test\"\n");
}
@Test
public void testValueIsNotPresentWhenDisabled() throws Exception {
TestRunner runner = createTestEnvironment("2019", "2019", "family1", "family2", false, "", null);
runner.setProperty(ScanAccumulo.VALUE_INCLUDED_IN_RESULT, "False");
runner.run();
List<MockFlowFile> results = runner.getFlowFilesForRelationship(ScanAccumulo.REL_SUCCESS);
assertEquals(1, results.size(), "Wrong count, received " + results.size());
assertValueInResult(results, "\n");
}
private TestRunner createTestEnvironment(String row, String endrow, String cf, String endcf, boolean valueincq,
String auths, Authorizations defaultVis) throws Exception {
String tableName = createTable(defaultVis);
TestRunner runner = configureTestRunner(row, endrow, cf, endcf, auths, tableName);
generateTestData(row,tableName,valueincq, auths);
return runner;
}
private String createTable(Authorizations defaultVis) throws AccumuloException, AccumuloSecurityException, TableExistsException {
String tableName = UUID.randomUUID().toString();
tableName=tableName.replace("-","a");
if (null != defaultVis)
accumulo.getConnector("root","password").securityOperations().changeUserAuthorizations("root", defaultVis);
accumulo.getConnector("root","password").tableOperations().create(tableName);
return tableName;
}
private TestRunner configureTestRunner(String row, String endrow, String cf, String endcf, String auths, String tableName) throws InitializationException {
TestRunner runner = getTestRunner();
runner.setProperty(ScanAccumulo.TABLE_NAME, tableName);
runner.setProperty(ScanAccumulo.START_KEY, row);
if (!cf.isEmpty())
runner.setProperty(ScanAccumulo.COLUMNFAMILY, cf);
if (!endcf.isEmpty())
runner.setProperty(ScanAccumulo.COLUMNFAMILY_END, endcf);
runner.setProperty(ScanAccumulo.AUTHORIZATIONS, auths);
runner.setProperty(ScanAccumulo.END_KEY, endrow);
return runner;
}
private TestRunner getTestRunner() throws InitializationException {
final TestRunner runner = TestRunners.newTestRunner(ScanAccumulo.class);
runner.enforceReadStreamsClosed(false);
MockAccumuloService.getService(runner,accumulo.getZooKeepers(),accumulo.getInstanceName(),"root","password");
runner.addControllerService("parser", PARSER);
runner.enableControllerService(PARSER);
runner.setProperty(ScanAccumulo.RECORD_WRITER,"parser");
return runner;
}
private void generateTestData(String definedRow, String table, boolean valueincq, String cv)
throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
BatchWriterConfig writerConfig = new BatchWriterConfig();
writerConfig.setMaxWriteThreads(2);
writerConfig.setMaxMemory(1024*1024);
MultiTableBatchWriter writer = accumulo.getConnector("root","password").createMultiTableBatchWriter(writerConfig);
long ts = System.currentTimeMillis();
ColumnVisibility colViz = new ColumnVisibility();
if (null != cv)
colViz = new ColumnVisibility(cv);
for (int x = 0; x < 5; x++) {
final String row = definedRow.isEmpty() ? UUID.randomUUID().toString() : definedRow;
final String cq = UUID.randomUUID().toString();
Text keyCq = new Text("code");
if (valueincq){
keyCq.append(cq.getBytes(),0,cq.length());
}
Mutation m = new Mutation(row);
m.put(new Text(DEFAULT_COLUMN_FAMILY),new Text(keyCq),colViz,ts, new Value("Test"));
writer.getBatchWriter(table).addMutation(m);
}
writer.flush();
}
private void assertRecordCount(List<MockFlowFile> results, int expected) {
for (MockFlowFile ff : results){
String attr = ff.getAttribute("record.count");
assertEquals(expected, Integer.valueOf(attr).intValue());
}
}
private void assertValueInResult(List<MockFlowFile> results, String expected) {
for (MockFlowFile ff : results) {
assertEquals(expected, ff.getContent());
}
}
}

View File

@ -25,12 +25,11 @@ import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestAccumuloService {
@ -47,17 +46,12 @@ public class TestAccumuloService {
private TestRunner runner;
private AccumuloService accumuloService;
@Mock
private KerberosCredentialsService credentialService;
@Mock
private KerberosUserService kerberosUserService;
@Mock
private Processor dummyProcessor;
private final KerberosCredentialsService credentialService = mock(KerberosCredentialsService.class);
private final KerberosUserService kerberosUserService = mock(KerberosUserService.class);
private final Processor dummyProcessor = mock(Processor.class);
@BeforeEach
public void init() {
MockitoAnnotations.initMocks(this);
runner = TestRunners.newTestRunner(dummyProcessor);
accumuloService = new AccumuloService();

View File

@ -36,7 +36,7 @@ public class AirtableRestService {
public static final String API_V0_BASE_URL = "https://api.airtable.com/v0";
private static final int TOO_MANY_REQUESTS = 429;
private static final Range<Integer> SUCCESSFUL_RESPONSE_RANGE = Range.between(200, 299);
private static final Range<Integer> SUCCESSFUL_RESPONSE_RANGE = Range.of(200, 299);
private final WebClientServiceProvider webClientServiceProvider;
private final String apiUrl;

View File

@ -16,6 +16,11 @@
*/
package org.apache.nifi.jasn1;
import java.io.FileNotFoundException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.ControllerServiceInitializationContext;
import org.apache.nifi.logging.ComponentLog;
@ -30,12 +35,6 @@ import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.io.FileNotFoundException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;
import static org.apache.nifi.jasn1.JASN1Reader.ASN_FILES;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
@ -44,9 +43,9 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Answers.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ -58,24 +57,24 @@ public class JASN1ReaderTest {
private ControllerServiceInitializationContext context;
@Mock
private ComponentLog logger;
private AutoCloseable mockCloseable;
@BeforeEach
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
mockCloseable = MockitoAnnotations.openMocks(this);
testSubject = new JASN1Reader();
when(context.getLogger()).thenReturn(logger);
testSubject.initialize(context);
}
@AfterEach
public void tearDown() {
public void tearDown() throws Exception {
if (mockCloseable != null) {
mockCloseable.close();
}
assertTrue(testSubject.asnOutDir.toFile().exists());
testSubject.deleteAsnOutDir();
assertTrue(!testSubject.asnOutDir.toFile().exists());
}

View File

@ -33,7 +33,7 @@ public class GenericApiGatewayClient extends AmazonWebServiceClient {
private final JsonResponseHandler<GenericApiGatewayResponse> responseHandler;
private final HttpResponseHandler<AmazonServiceException> errorResponseHandler;
private final AWSCredentialsProvider credentials;
private String apiKey;
private final String apiKey;
private final AWS4Signer signer;
GenericApiGatewayClient(ClientConfiguration clientConfiguration, String endpoint, Region region,
@ -71,7 +71,7 @@ public class GenericApiGatewayClient extends AmazonWebServiceClient {
private GenericApiGatewayResponse execute(HttpMethodName method, String resourcePath, Map<String, String> headers, Map<String,List<String>> parameters, InputStream content) {
final ExecutionContext executionContext = buildExecutionContext();
DefaultRequest request = new DefaultRequest(API_GATEWAY_SERVICE_NAME);
DefaultRequest<?> request = new DefaultRequest<>(API_GATEWAY_SERVICE_NAME);
request.setHttpMethod(method);
request.setContent(content);
request.setEndpoint(this.endpoint);

View File

@ -17,6 +17,17 @@
package org.apache.nifi.processors.aws.wag;
import com.amazonaws.http.AmazonHttpClient;
import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.apache.http.impl.EnglishReasonPhraseCatalog;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.InputRequirement;
@ -29,6 +40,7 @@ import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.ConfigVerificationResult;
import org.apache.nifi.components.ConfigVerificationResult.Outcome;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.logging.ComponentLog;
@ -42,18 +54,6 @@ import org.apache.nifi.processors.aws.wag.client.GenericApiGatewayRequest;
import org.apache.nifi.processors.aws.wag.client.GenericApiGatewayResponse;
import org.apache.nifi.stream.io.StreamUtils;
import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
@SupportsBatching
@InputRequirement(Requirement.INPUT_ALLOWED)
@Tags({"Amazon", "AWS", "Client", "Gateway-API", "Rest", "http", "https"})
@ -67,10 +67,8 @@ import java.util.concurrent.TimeUnit;
@WritesAttribute(attribute = "aws.gateway.api.tx.id", description = "The transaction ID that is returned after reading the response"),
@WritesAttribute(attribute = "aws.gateway.api.java.exception.class", description = "The Java exception class raised when the processor fails"),
@WritesAttribute(attribute = "aws.gateway.api.java.exception.message", description = "The Java exception message raised when the processor fails"),})
@DynamicProperty(name = "Header Name", value = "Attribute Expression Language", supportsExpressionLanguage = true, description =
"Send request header "
+ "with a key matching the Dynamic Property Key and a value created by evaluating the Attribute Expression Language set in the value "
+ "of the Dynamic Property.")
@DynamicProperty(name = "Header Name", value = "Attribute Expression Language", expressionLanguageScope = ExpressionLanguageScope.FLOWFILE_ATTRIBUTES,
description = "Send request header with a key matching the Dynamic Property Key and a value created by evaluating the Attribute Expression Language set in the value of the Dynamic Property.")
public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
private static final Set<String> IDEMPOTENT_METHODS = new HashSet<>(Arrays.asList("GET", "HEAD", "OPTIONS"));

View File

@ -26,6 +26,9 @@ import com.amazonaws.services.kinesis.clientlibrary.types.ExtendedSequenceNumber
import com.amazonaws.services.kinesis.clientlibrary.types.InitializationInput;
import com.amazonaws.services.kinesis.clientlibrary.types.ShutdownInput;
import com.amazonaws.services.kinesis.model.Record;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessSessionFactory;
@ -38,12 +41,6 @@ import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
@ -51,6 +48,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -62,23 +60,16 @@ public class TestAbstractKinesisRecordProcessor {
private final TestRunner runner = TestRunners.newTestRunner(ConsumeKinesisStream.class);
@Mock
private ProcessSessionFactory processSessionFactory;
private final ProcessSessionFactory processSessionFactory = mock(ProcessSessionFactory.class);
private final MockProcessSession session = new MockProcessSession(new SharedSessionState(runner.getProcessor(), new AtomicLong(0)), runner.getProcessor());
private AbstractKinesisRecordProcessor fixture;
private final IRecordProcessorCheckpointer checkpointer = mock(IRecordProcessorCheckpointer.class);
@Mock
private IRecordProcessorCheckpointer checkpointer;
@Mock
private Record kinesisRecord;
private final Record kinesisRecord = mock(Record.class);
@BeforeEach
public void setUp() {
MockitoAnnotations.initMocks(this);
when(processSessionFactory.createSession()).thenReturn(session);
// default test fixture will try operations twice with very little wait in between

View File

@ -21,19 +21,6 @@ import com.amazonaws.services.kinesis.clientlibrary.exceptions.ShutdownException
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer;
import com.amazonaws.services.kinesis.clientlibrary.types.ProcessRecordsInput;
import com.amazonaws.services.kinesis.model.Record;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.processors.aws.kinesis.stream.ConsumeKinesisStream;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.MockProcessSession;
import org.apache.nifi.util.SharedSessionState;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
@ -46,11 +33,22 @@ import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.processors.aws.kinesis.stream.ConsumeKinesisStream;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.MockProcessSession;
import org.apache.nifi.util.SharedSessionState;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -62,24 +60,18 @@ public class TestKinesisRecordProcessorRaw {
private final TestRunner runner = TestRunners.newTestRunner(ConsumeKinesisStream.class);
@Mock
private ProcessSessionFactory processSessionFactory;
private final ProcessSessionFactory processSessionFactory = mock(ProcessSessionFactory.class);
private final SharedSessionState sharedState = new SharedSessionState(runner.getProcessor(), new AtomicLong(0));
private final MockProcessSession session = new MockProcessSession(sharedState, runner.getProcessor());
private AbstractKinesisRecordProcessor fixture;
@Mock
private IRecordProcessorCheckpointer checkpointer;
@Mock
private Record kinesisRecord;
private final IRecordProcessorCheckpointer checkpointer = mock(IRecordProcessorCheckpointer.class);
private final Record kinesisRecord = mock(Record.class);
@BeforeEach
public void setUp() {
MockitoAnnotations.initMocks(this);
// default test fixture will try operations twice with very little wait in between
fixture = new KinesisRecordProcessorRaw(processSessionFactory, runner.getLogger(), "kinesis-test",
"endpoint-prefix", null, 10_000L, 1L, 2, DATE_TIME_FORMATTER);

View File

@ -21,6 +21,18 @@ import com.amazonaws.services.kinesis.clientlibrary.exceptions.ShutdownException
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer;
import com.amazonaws.services.kinesis.clientlibrary.types.ProcessRecordsInput;
import com.amazonaws.services.kinesis.model.Record;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.json.JsonRecordSetWriter;
import org.apache.nifi.json.JsonTreeReader;
@ -39,27 +51,13 @@ import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -71,8 +69,7 @@ public class TestKinesisRecordProcessorRecord {
private final TestRunner runner = TestRunners.newTestRunner(ConsumeKinesisStream.class);
@Mock
private ProcessSessionFactory processSessionFactory;
private final ProcessSessionFactory processSessionFactory = mock(ProcessSessionFactory.class);
private final SharedSessionState sharedState = new SharedSessionState(runner.getProcessor(), new AtomicLong(0));
private final MockProcessSession session = new MockProcessSession(sharedState, runner.getProcessor());
@ -81,16 +78,11 @@ public class TestKinesisRecordProcessorRecord {
private final RecordReaderFactory reader = new JsonTreeReader();
private final RecordSetWriterFactory writer = new JsonRecordSetWriter();
@Mock
private IRecordProcessorCheckpointer checkpointer;
@Mock
private Record kinesisRecord;
private final IRecordProcessorCheckpointer checkpointer = mock(IRecordProcessorCheckpointer.class);
private final Record kinesisRecord = mock(Record.class);
@BeforeEach
public void setUp() throws InitializationException {
MockitoAnnotations.initMocks(this);
runner.addControllerService("record-reader", reader);
runner.setProperty(reader, SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY, SchemaInferenceUtil.INFER_SCHEMA.getValue());
runner.enableControllerService(reader);

View File

@ -16,6 +16,18 @@
*/
package org.apache.nifi.authorization.azure;
import com.google.gson.JsonObject;
import com.microsoft.graph.core.ClientException;
import com.microsoft.graph.models.extensions.IGraphServiceClient;
import com.microsoft.graph.options.Option;
import com.microsoft.graph.options.QueryOption;
import com.microsoft.graph.requests.extensions.GraphServiceClient;
import com.microsoft.graph.requests.extensions.IGroupCollectionPage;
import com.microsoft.graph.requests.extensions.IGroupCollectionRequest;
import com.microsoft.graph.requests.extensions.IGroupCollectionRequestBuilder;
import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesPage;
import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesRequest;
import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesRequestBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
@ -29,20 +41,6 @@ import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import com.google.gson.JsonObject;
import com.microsoft.graph.core.ClientException;
import com.microsoft.graph.models.extensions.IGraphServiceClient;
import com.microsoft.graph.options.Option;
import com.microsoft.graph.options.QueryOption;
import com.microsoft.graph.requests.extensions.GraphServiceClient;
import com.microsoft.graph.requests.extensions.IGroupCollectionPage;
import com.microsoft.graph.requests.extensions.IGroupCollectionRequest;
import com.microsoft.graph.requests.extensions.IGroupCollectionRequestBuilder;
import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesPage;
import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesRequest;
import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesRequestBuilder;
import org.apache.nifi.authorization.AuthorizerConfigurationContext;
import org.apache.nifi.authorization.Group;
import org.apache.nifi.authorization.User;
@ -212,7 +210,7 @@ public class AzureGraphUserGroupProvider implements UserGroupProvider {
.graphScope(graphScope)
.build();
graphClient = GraphServiceClient.builder().authenticationProvider(authProvider).buildClient();
if ( ! StringUtils.isBlank(graphEndpoint)) {
if (!StringUtils.isBlank(graphEndpoint)) {
graphClient.setServiceRoot(graphEndpoint);
}
} catch (final ClientException e) {

View File

@ -16,22 +16,6 @@
*/
package org.apache.nifi.processors.azure.eventhub;
import java.time.Duration;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Optional;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import com.azure.core.amqp.AmqpClientOptions;
import com.azure.core.amqp.AmqpTransportType;
import com.azure.core.credential.AzureNamedKeyCredential;
@ -43,7 +27,20 @@ import com.azure.messaging.eventhubs.EventHubConsumerClient;
import com.azure.messaging.eventhubs.models.EventPosition;
import com.azure.messaging.eventhubs.models.PartitionContext;
import com.azure.messaging.eventhubs.models.PartitionEvent;
import java.time.Duration;
import java.time.Instant;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
@ -68,10 +65,10 @@ import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.azure.eventhub.utils.AzureEventHubUtils;
import org.apache.nifi.scheduling.ExecutionNode;
import org.apache.nifi.shared.azure.eventhubs.AzureEventHubComponent;
import org.apache.nifi.util.StopWatch;
import org.apache.nifi.processors.azure.eventhub.utils.AzureEventHubUtils;
@Tags({"azure", "microsoft", "cloud", "eventhub", "events", "streaming", "streams"})
@CapabilityDescription("Receives messages from Microsoft Azure Event Hubs without reliable checkpoint tracking. "
@ -120,14 +117,6 @@ public class GetAzureEventHub extends AbstractProcessor implements AzureEventHub
static final PropertyDescriptor POLICY_PRIMARY_KEY = AzureEventHubUtils.POLICY_PRIMARY_KEY;
static final PropertyDescriptor USE_MANAGED_IDENTITY = AzureEventHubUtils.USE_MANAGED_IDENTITY;
@Deprecated
static final PropertyDescriptor NUM_PARTITIONS = new PropertyDescriptor.Builder()
.name("Number of Event Hub Partitions")
.description("This property is deprecated and no longer used.")
.addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.required(false)
.build();
static final PropertyDescriptor CONSUMER_GROUP = new PropertyDescriptor.Builder()
.name("Event Hub Consumer Group")
.displayName("Consumer Group")
@ -173,7 +162,7 @@ public class GetAzureEventHub extends AbstractProcessor implements AzureEventHub
private final static Set<Relationship> relationships;
static {
propertyDescriptors = Collections.unmodifiableList(Arrays.asList(
propertyDescriptors = List.of(
NAMESPACE,
EVENT_HUB_NAME,
SERVICE_BUS_ENDPOINT,
@ -181,12 +170,11 @@ public class GetAzureEventHub extends AbstractProcessor implements AzureEventHub
ACCESS_POLICY,
POLICY_PRIMARY_KEY,
USE_MANAGED_IDENTITY,
NUM_PARTITIONS,
CONSUMER_GROUP,
ENQUEUE_TIME,
RECEIVER_FETCH_SIZE,
RECEIVER_FETCH_TIMEOUT
));
);
relationships = Collections.singleton(REL_SUCCESS);
}

View File

@ -20,15 +20,24 @@ import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.JdkSSLOptions;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.ProtocolOptions;
import com.datastax.driver.core.RemoteEndpointAwareJdkSSLOptions;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.SSLOptions;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.TypeCodec;
import com.datastax.driver.core.exceptions.AuthenticationException;
import com.datastax.driver.core.exceptions.NoHostAvailableException;
import com.datastax.driver.extras.codecs.arrays.ObjectArrayCodec;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import javax.net.ssl.SSLContext;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.commons.lang3.StringUtils;
@ -48,15 +57,6 @@ import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.security.util.ClientAuth;
import org.apache.nifi.ssl.SSLContextService;
import javax.net.ssl.SSLContext;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
/**
* AbstractCassandraProcessor is a base class for Cassandra processors and contains logic and variables common to most
* processors integrating with Apache Cassandra.
@ -319,19 +319,22 @@ public abstract class AbstractCassandraProcessor extends AbstractProcessor {
String username, String password, String compressionType) {
Cluster.Builder builder = Cluster.builder().addContactPointsWithPorts(contactPoints);
if (sslContext != null) {
JdkSSLOptions sslOptions = JdkSSLOptions.builder()
final SSLOptions sslOptions = RemoteEndpointAwareJdkSSLOptions.builder()
.withSSLContext(sslContext)
.build();
builder = builder.withSSL(sslOptions);
if(ProtocolOptions.Compression.SNAPPY.equals(compressionType)) {
if (ProtocolOptions.Compression.SNAPPY.name().equals(compressionType)) {
builder = builder.withCompression(ProtocolOptions.Compression.SNAPPY);
} else if(ProtocolOptions.Compression.LZ4.equals(compressionType)) {
} else if (ProtocolOptions.Compression.LZ4.name().equals(compressionType)) {
builder = builder.withCompression(ProtocolOptions.Compression.LZ4);
}
}
if (username != null && password != null) {
builder = builder.withCredentials(username, password);
}
return builder.build();
}
@ -438,38 +441,22 @@ public abstract class AbstractCassandraProcessor extends AbstractProcessor {
*
* @param dataType The data type of the field
*/
protected static Schema getSchemaForType(String dataType) {
SchemaBuilder.TypeBuilder<Schema> typeBuilder = SchemaBuilder.builder();
Schema returnSchema;
switch (dataType) {
case "string":
returnSchema = typeBuilder.stringType();
break;
case "boolean":
returnSchema = typeBuilder.booleanType();
break;
case "int":
returnSchema = typeBuilder.intType();
break;
case "long":
returnSchema = typeBuilder.longType();
break;
case "float":
returnSchema = typeBuilder.floatType();
break;
case "double":
returnSchema = typeBuilder.doubleType();
break;
case "bytes":
returnSchema = typeBuilder.bytesType();
break;
default:
throw new IllegalArgumentException("Unknown Avro primitive type: " + dataType);
}
protected static Schema getSchemaForType(final String dataType) {
final SchemaBuilder.TypeBuilder<Schema> typeBuilder = SchemaBuilder.builder();
final Schema returnSchema = switch (dataType) {
case "string" -> typeBuilder.stringType();
case "boolean" -> typeBuilder.booleanType();
case "int" -> typeBuilder.intType();
case "long" -> typeBuilder.longType();
case "float" -> typeBuilder.floatType();
case "double" -> typeBuilder.doubleType();
case "bytes" -> typeBuilder.bytesType();
default -> throw new IllegalArgumentException("Unknown Avro primitive type: " + dataType);
};
return returnSchema;
}
protected static String getPrimitiveAvroTypeFromCassandraType(DataType dataType) {
protected static String getPrimitiveAvroTypeFromCassandraType(final DataType dataType) {
// Map types from Cassandra to Avro where possible
if (dataType.equals(DataType.ascii())
|| dataType.equals(DataType.text())

View File

@ -17,19 +17,19 @@
package org.apache.nifi.processors.cassandra;
import com.datastax.driver.core.querybuilder.Insert;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.util.Tuple;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.util.Tuple;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.anyString;
@ -40,14 +40,21 @@ public class PutCassandraRecordInsertTest {
@Mock
private RecordSchema schema;
private AutoCloseable mockCloseable;
@BeforeEach
public void setUp() {
MockitoAnnotations.initMocks(this);
mockCloseable = MockitoAnnotations.openMocks(this);
testSubject = new PutCassandraRecord();
}
@AfterEach
public void closeMock() throws Exception {
if (mockCloseable != null) {
mockCloseable.close();
}
}
@Test
public void testGenerateInsert() {
testGenerateInsert(

View File

@ -17,36 +17,32 @@
package org.apache.nifi.processors.cassandra;
import com.datastax.driver.core.Statement;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.util.Tuple;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.util.Tuple;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class PutCassandraRecordUpdateTest {
private PutCassandraRecord testSubject;
@Mock
private RecordSchema schema;
private final RecordSchema schema = mock(RecordSchema.class);
@BeforeEach
public void setUp() {
MockitoAnnotations.initMocks(this);
testSubject = new PutCassandraRecord();
}
@Test
public void testGenerateUpdateWithEmptyKeyList() {
Stream.of("", ",", ",,,").forEach(updateKeys -> testGenerateUpdate(

View File

@ -18,15 +18,15 @@ package org.apache.nifi.service;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.JdkSSLOptions;
import com.datastax.driver.core.Metadata;
import com.datastax.driver.core.ProtocolOptions;
import com.datastax.driver.core.RemoteEndpointAwareJdkSSLOptions;
import com.datastax.driver.core.SSLOptions;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.SocketOptions;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import javax.net.ssl.SSLContext;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
@ -136,7 +136,7 @@ public class CassandraSessionProvider extends AbstractControllerService implemen
static final PropertyDescriptor CONNECT_TIMEOUT_MS = new PropertyDescriptor.Builder()
.name("connect-timeout-ms")
.displayName("Connect Timout (ms)")
.displayName("Connect Timeout (ms)")
.description("Connection timeout (in milliseconds). 0 means no timeout. If no value is set, the underlying default will be used.")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
@ -237,18 +237,11 @@ public class CassandraSessionProvider extends AbstractControllerService implemen
password = null;
}
PropertyValue readTimeoutMillisProperty = context.getProperty(READ_TIMEOUT_MS).evaluateAttributeExpressions();
Optional<Integer> readTimeoutMillisOptional = Optional.ofNullable(readTimeoutMillisProperty)
.filter(PropertyValue::isSet)
.map(PropertyValue::asInteger);
PropertyValue connectTimeoutMillisProperty = context.getProperty(CONNECT_TIMEOUT_MS).evaluateAttributeExpressions();
Optional<Integer> connectTimeoutMillisOptional = Optional.ofNullable(connectTimeoutMillisProperty)
.filter(PropertyValue::isSet)
.map(PropertyValue::asInteger);
final Integer readTimeoutMillis = context.getProperty(READ_TIMEOUT_MS).evaluateAttributeExpressions().asInteger();
final Integer connectTimeoutMillis = context.getProperty(CONNECT_TIMEOUT_MS).evaluateAttributeExpressions().asInteger();
// Create the cluster and connect to it
Cluster newCluster = createCluster(contactPoints, sslContext, username, password, compressionType, readTimeoutMillisOptional, connectTimeoutMillisOptional);
Cluster newCluster = createCluster(contactPoints, sslContext, username, password, compressionType, readTimeoutMillis, connectTimeoutMillis);
PropertyValue keyspaceProperty = context.getProperty(KEYSPACE).evaluateAttributeExpressions();
final Session newSession;
if (keyspaceProperty != null) {
@ -285,13 +278,13 @@ public class CassandraSessionProvider extends AbstractControllerService implemen
return contactPoints;
}
private Cluster createCluster(List<InetSocketAddress> contactPoints, SSLContext sslContext,
String username, String password, String compressionType,
Optional<Integer> readTimeoutMillisOptional, Optional<Integer> connectTimeoutMillisOptional) {
Cluster.Builder builder = Cluster.builder().addContactPointsWithPorts(contactPoints);
private Cluster createCluster(final List<InetSocketAddress> contactPoints, final SSLContext sslContext,
final String username, final String password, final String compressionType,
final Integer readTimeoutMillis, final Integer connectTimeoutMillis) {
Cluster.Builder builder = Cluster.builder().addContactPointsWithPorts(contactPoints);
if (sslContext != null) {
JdkSSLOptions sslOptions = JdkSSLOptions.builder()
final SSLOptions sslOptions = RemoteEndpointAwareJdkSSLOptions.builder()
.withSSLContext(sslContext)
.build();
builder = builder.withSSL(sslOptions);
@ -301,15 +294,19 @@ public class CassandraSessionProvider extends AbstractControllerService implemen
builder = builder.withCredentials(username, password);
}
if(ProtocolOptions.Compression.SNAPPY.equals(compressionType)) {
if (ProtocolOptions.Compression.SNAPPY.name().equals(compressionType)) {
builder = builder.withCompression(ProtocolOptions.Compression.SNAPPY);
} else if(ProtocolOptions.Compression.LZ4.equals(compressionType)) {
} else if (ProtocolOptions.Compression.LZ4.name().equals(compressionType)) {
builder = builder.withCompression(ProtocolOptions.Compression.LZ4);
}
SocketOptions socketOptions = new SocketOptions();
readTimeoutMillisOptional.ifPresent(socketOptions::setReadTimeoutMillis);
connectTimeoutMillisOptional.ifPresent(socketOptions::setConnectTimeoutMillis);
if (readTimeoutMillis != null) {
socketOptions.setReadTimeoutMillis(readTimeoutMillis);
}
if (connectTimeoutMillis != null) {
socketOptions.setConnectTimeoutMillis(connectTimeoutMillis);
}
builder.withSocketOptions(socketOptions);

View File

@ -27,6 +27,33 @@ import com.github.shyiko.mysql.binlog.event.QueryEventData;
import com.github.shyiko.mysql.binlog.event.RotateEventData;
import com.github.shyiko.mysql.binlog.event.TableMapEventData;
import com.github.shyiko.mysql.binlog.network.SSLMode;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import javax.net.ssl.SSLContext;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.PrimaryNodeOnly;
@ -47,20 +74,20 @@ import org.apache.nifi.cdc.event.ColumnDefinition;
import org.apache.nifi.cdc.event.RowEventException;
import org.apache.nifi.cdc.event.TableInfo;
import org.apache.nifi.cdc.event.TableInfoCacheKey;
import org.apache.nifi.cdc.event.io.EventWriterConfiguration;
import org.apache.nifi.cdc.event.io.FlowFileEventWriteStrategy;
import org.apache.nifi.cdc.mysql.event.BinlogEventInfo;
import org.apache.nifi.cdc.mysql.event.BinlogEventListener;
import org.apache.nifi.cdc.mysql.event.BinlogLifecycleListener;
import org.apache.nifi.cdc.mysql.event.DataCaptureState;
import org.apache.nifi.cdc.event.io.EventWriterConfiguration;
import org.apache.nifi.cdc.event.io.FlowFileEventWriteStrategy;
import org.apache.nifi.cdc.mysql.event.RawBinlogEvent;
import org.apache.nifi.cdc.mysql.event.io.AbstractBinlogEventWriter;
import org.apache.nifi.cdc.mysql.event.handler.BeginEventHandler;
import org.apache.nifi.cdc.mysql.event.handler.CommitEventHandler;
import org.apache.nifi.cdc.mysql.event.handler.DDLEventHandler;
import org.apache.nifi.cdc.mysql.event.handler.DeleteEventHandler;
import org.apache.nifi.cdc.mysql.event.handler.InsertEventHandler;
import org.apache.nifi.cdc.mysql.event.handler.UpdateEventHandler;
import org.apache.nifi.cdc.mysql.event.io.AbstractBinlogEventWriter;
import org.apache.nifi.cdc.mysql.processors.ssl.BinaryLogSSLSocketFactory;
import org.apache.nifi.cdc.mysql.processors.ssl.ConnectionPropertiesProvider;
import org.apache.nifi.cdc.mysql.processors.ssl.StandardConnectionPropertiesProvider;
@ -90,34 +117,6 @@ import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.security.util.TlsConfiguration;
import org.apache.nifi.ssl.SSLContextService;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import static com.github.shyiko.mysql.binlog.event.EventType.DELETE_ROWS;
import static com.github.shyiko.mysql.binlog.event.EventType.EXT_DELETE_ROWS;
import static com.github.shyiko.mysql.binlog.event.EventType.EXT_WRITE_ROWS;
@ -1345,7 +1344,7 @@ public class CaptureChangeMySQL extends AbstractSessionFactoryProcessor {
if (clazz == null) {
throw new InitializationException("Can't load Database Driver " + drvName);
}
final Driver driver = (Driver) clazz.newInstance();
final Driver driver = (Driver) clazz.getDeclaredConstructor().newInstance();
DriverManager.registerDriver(new DriverShim(driver));
} catch (final InitializationException e) {

View File

@ -16,6 +16,20 @@
*/
package org.apache.nifi.processors.email.smtp;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.io.IOUtils;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
@ -32,21 +46,6 @@ import org.subethamail.smtp.RejectException;
import org.subethamail.smtp.TooMuchDataException;
import org.subethamail.smtp.server.SMTPServer;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* A simple consumer that provides a bridge between 'push' message distribution
* provided by {@link SMTPServer} and NiFi polling scheduler mechanism.
@ -140,10 +139,9 @@ public class SmtpConsumer implements MessageHandler {
final Certificate[] tlsPeerCertificates = context.getTlsPeerCertificates();
if (tlsPeerCertificates != null) {
for (int i = 0; i < tlsPeerCertificates.length; i++) {
if (tlsPeerCertificates[i] instanceof X509Certificate) {
X509Certificate x509Cert = (X509Certificate) tlsPeerCertificates[i];
if (tlsPeerCertificates[i] instanceof final X509Certificate x509Cert) {
attributes.put("smtp.certificate." + i + ".serial", x509Cert.getSerialNumber().toString());
attributes.put("smtp.certificate." + i + ".subjectName", x509Cert.getSubjectDN().getName());
attributes.put("smtp.certificate." + i + ".subjectName", x509Cert.getSubjectX500Principal().getName());
}
}
}

View File

@ -21,7 +21,6 @@ import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jr.ob.JSON;
import com.maxmind.geoip2.model.CityResponse;
import java.util.Collections;
public class GeoEnrichTestUtils {
@ -119,7 +118,7 @@ public class GeoEnrichTestUtils {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
return new ObjectMapper().reader(CityResponse.class).with(inject).readValue(maxMindCityResponse);
return new ObjectMapper().readerFor(CityResponse.class).with(inject).readValue(maxMindCityResponse);
}
public static CityResponse getNullLatAndLongCityResponse() throws Exception {
@ -214,6 +213,6 @@ public class GeoEnrichTestUtils {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
return new ObjectMapper().reader(CityResponse.class).with(inject).readValue(maxMindCityResponse);
return new ObjectMapper().readerFor(CityResponse.class).with(inject).readValue(maxMindCityResponse);
}
}

View File

@ -16,29 +16,6 @@
*/
package org.apache.nifi.util.db;
import org.apache.avro.Conversions;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.util.Utf8;
import org.apache.commons.io.input.ReaderInputStream;
import org.apache.derby.jdbc.EmbeddedDriver;
import org.apache.nifi.util.file.FileUtils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.CharArrayReader;
@ -75,6 +52,28 @@ import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.stream.IntStream;
import org.apache.avro.Conversions;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.util.Utf8;
import org.apache.commons.io.input.ReaderInputStream;
import org.apache.derby.jdbc.EmbeddedDriver;
import org.apache.nifi.util.file.FileUtils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.nifi.util.db.JdbcCommon.MASKED_LOG_VALUE;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ -540,7 +539,7 @@ public class TestJdbcCommon {
IntStream.range(0, 4002).forEach((i) -> buffer[i] = String.valueOf(i % 10).charAt(0));
// Put a zero-byte in to test the buffer building logic
buffer[1] = 0;
ReaderInputStream isr = new ReaderInputStream(new CharArrayReader(buffer), Charset.defaultCharset());
final ReaderInputStream isr = ReaderInputStream.builder().setCharset(Charset.defaultCharset()).setReader(new CharArrayReader(buffer)).get();
// - set the value of the input parameter to the input stream
ps.setAsciiStream(2, isr, 4002);

View File

@ -16,6 +16,14 @@
*/
package org.apache.nifi.dbcp;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.SQLException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.lifecycle.OnDisabled;
@ -35,14 +43,6 @@ import org.apache.nifi.security.krb.KerberosAction;
import org.apache.nifi.security.krb.KerberosLoginException;
import org.apache.nifi.security.krb.KerberosUser;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.apache.nifi.components.ConfigVerificationResult.Outcome.FAILED;
import static org.apache.nifi.components.ConfigVerificationResult.Outcome.SUCCESSFUL;
import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_DRIVER_LOCATION;
@ -161,14 +161,14 @@ public abstract class AbstractDBCPConnectionPool extends AbstractControllerServi
final Driver driver = getDriver(configuration.getDriverName(), configuration.getUrl());
basicDataSource.setDriver(driver);
basicDataSource.setMaxWaitMillis(configuration.getMaxWaitMillis());
basicDataSource.setMaxWait(Duration.ofMillis(configuration.getMaxWaitMillis()));
basicDataSource.setMaxTotal(configuration.getMaxTotal());
basicDataSource.setMinIdle(configuration.getMinIdle());
basicDataSource.setMaxIdle(configuration.getMaxIdle());
basicDataSource.setMaxConnLifetimeMillis(configuration.getMaxConnLifetimeMillis());
basicDataSource.setTimeBetweenEvictionRunsMillis(configuration.getTimeBetweenEvictionRunsMillis());
basicDataSource.setMinEvictableIdleTimeMillis(configuration.getMinEvictableIdleTimeMillis());
basicDataSource.setSoftMinEvictableIdleTimeMillis(configuration.getSoftMinEvictableIdleTimeMillis());
basicDataSource.setMaxConn(Duration.ofMillis(configuration.getMaxConnLifetimeMillis()));
basicDataSource.setDurationBetweenEvictionRuns(Duration.ofMillis(configuration.getTimeBetweenEvictionRunsMillis()));
basicDataSource.setMinEvictableIdle(Duration.ofMillis(configuration.getMinEvictableIdleTimeMillis()));
basicDataSource.setSoftMinEvictableIdle(Duration.ofMillis(configuration.getSoftMinEvictableIdleTimeMillis()));
final String validationQuery = configuration.getValidationQuery();
if (StringUtils.isNotBlank(validationQuery)) {

View File

@ -16,12 +16,10 @@
*/
package org.apache.nifi.dbcp.utils;
import org.apache.commons.dbcp2.BasicDataSource;
import java.util.concurrent.TimeUnit;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.apache.nifi.util.FormatUtils;
import java.util.concurrent.TimeUnit;
public enum DefaultDataSourceValues {
MAX_WAIT_TIME("500 millis") {
@ -31,35 +29,28 @@ public enum DefaultDataSourceValues {
}
},
MAX_TOTAL_CONNECTIONS("8"),
/**
* Copied from {@link GenericObjectPoolConfig#DEFAULT_MIN_IDLE} in Commons-DBCP 2.7.0
*/
MIN_IDLE("0"),
/**
* Copied from {@link GenericObjectPoolConfig#DEFAULT_MAX_IDLE} in Commons-DBCP 2.7.0
*/
MAX_IDLE("8"),
/**
* Copied from private variable {@link BasicDataSource#maxConnLifetimeMillis} in Commons-DBCP 2.7.0
*/
MAX_CONN_LIFETIME("-1"),
/**
* Copied from {@link GenericObjectPoolConfig#DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS} in Commons-DBCP 2.7.0
*/
EVICTION_RUN_PERIOD("-1"),
/**
* Copied from {@link GenericObjectPoolConfig#DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS} in Commons-DBCP 2.7.0
* and converted from 1800000L to "1800000 millis" to "30 mins"
*/
MIN_EVICTABLE_IDLE_TIME("30 mins") {
@Override
public Long getLongValue() {
return (long) FormatUtils.getPreciseTimeDuration(MAX_WAIT_TIME.value, TimeUnit.MINUTES);
}
},
/**
* Copied from {@link GenericObjectPoolConfig#DEFAULT_SOFT_MIN_EVICTABLE_IDLE_TIME_MILLIS} in Commons-DBCP 2.7.0
*/
SOFT_MIN_EVICTABLE_IDLE_TIME("-1");

View File

@ -23,17 +23,15 @@ import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.net.UnknownHostException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Callable;
/**
* This class provides a very similar authentication scheme and token generation as {@link SPNegoScheme} does.
@ -59,23 +57,20 @@ public class KerberosKeytabSPNegoScheme extends SPNegoScheme {
loginContext.login();
Subject loggedInSubject = loginContext.getSubject();
return Subject.doAs(loggedInSubject, new PrivilegedExceptionAction<byte[]>() {
return Subject.callAs(loggedInSubject, new Callable<byte[]>() {
public byte[] run() throws UnknownHostException, ClassNotFoundException, GSSException,
IllegalAccessException, NoSuchFieldException {
GSSManager gssManager = GSSManager.getInstance();
String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP", authServer);
Oid serviceOid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
GSSName serviceName = gssManager.createName(servicePrincipal, serviceOid);
Oid mechOid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
GSSContext gssContext = gssManager.createContext(serviceName, mechOid, null, 0);
public byte[] call() throws UnknownHostException, GSSException {
final GSSManager gssManager = GSSManager.getInstance();
final String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP", authServer);
final GSSName serviceName = gssManager.createName(servicePrincipal, KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID);
final GSSContext gssContext = gssManager.createContext(serviceName, KerberosUtil.GSS_KRB5_MECH_OID, null, 0);
gssContext.requestCredDeleg(true);
gssContext.requestMutualAuth(true);
return gssContext.initSecContext(input, 0, input.length);
}
});
} catch (PrivilegedActionException | LoginException e) {
} catch (final LoginException e) {
throw new RuntimeException(e);
}
}

View File

@ -1,92 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hadoop;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.nifi.logging.ComponentLog;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
/**
* Periodically attempts to renew the Kerberos user's ticket for the given UGI.
*
* This class will attempt to call ugi.checkTGTAndReloginFromKeytab() which
* will re-login the user if the ticket expired or is close to expiry. Between
* relogin attempts this thread will sleep for the provided amount of time.
*
*/
@Deprecated
public class KerberosTicketRenewer implements Runnable {
private final UserGroupInformation ugi;
private final long renewalPeriod;
private final ComponentLog logger;
private volatile boolean stopped = false;
/**
* @param ugi
* the user to renew the ticket for
* @param renewalPeriod
* the amount of time in milliseconds to wait between renewal attempts
* @param logger
* the logger from the component that started the renewer
*/
public KerberosTicketRenewer(final UserGroupInformation ugi, final long renewalPeriod, final ComponentLog logger) {
this.ugi = ugi;
this.renewalPeriod = renewalPeriod;
this.logger = logger;
}
@Override
public void run() {
stopped = false;
while (!stopped) {
try {
logger.debug("Invoking renewal attempt for Kerberos ticket");
// While we run this "frequently", the Hadoop implementation will only perform the login at 80% of ticket lifetime.
ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
ugi.checkTGTAndReloginFromKeytab();
return null;
});
} catch (IOException e) {
logger.error("Failed to renew Kerberos ticket", e);
} catch (InterruptedException e) {
logger.error("Interrupted while attempting to renew Kerberos ticket", e);
Thread.currentThread().interrupt();
return;
}
logger.debug("current UGI {}", new Object[]{ugi});
// Wait for a bit before checking again.
try {
Thread.sleep(renewalPeriod);
} catch (InterruptedException e) {
logger.error("Renewal thread interrupted", e);
Thread.currentThread().interrupt();
return;
}
}
}
public void stop() {
stopped = true;
}
}

View File

@ -17,8 +17,6 @@
package org.apache.nifi.processor.util.list;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.nifi.annotation.behavior.Stateful;
import org.apache.nifi.annotation.behavior.TriggerSerially;
@ -398,7 +396,7 @@ public abstract class AbstractListProcessor<T extends ListableEntity> extends Ab
// Check if state already exists for this path. If so, we have already migrated the state.
final StateMap stateMap = context.getStateManager().getState(getStateScope(context));
if (!stateMap.getStateVersion().isPresent()) {
if (stateMap.getStateVersion().isEmpty()) {
try {
// Migrate state from the old way of managing state (distributed cache service and local file)
// to the new mechanism (State Manager).
@ -444,13 +442,11 @@ public abstract class AbstractListProcessor<T extends ListableEntity> extends Ab
}
// remove entry from distributed cache server
if (client != null) {
try {
client.remove(path, new StringSerDe());
} catch (final IOException ioe) {
getLogger().warn("Failed to remove entry from Distributed Cache Service. However, the state has already been migrated to use the new "
+ "State Management service, so the Distributed Cache Service is no longer needed.");
}
try {
client.remove(path, new StringSerDe());
} catch (final IOException ioe) {
getLogger().warn("Failed to remove entry from Distributed Cache Service. However, the state has already been migrated to use the new "
+ "State Management service, so the Distributed Cache Service is no longer needed.");
}
}
@ -513,7 +509,7 @@ public abstract class AbstractListProcessor<T extends ListableEntity> extends Ab
return getIdentifier() + ".lastListingTime." + directory;
}
private EntityListing deserialize(final String serializedState) throws JsonParseException, JsonMappingException, IOException {
private EntityListing deserialize(final String serializedState) throws IOException {
final ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(serializedState, EntityListing.class);
}

View File

@ -30,7 +30,6 @@ import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
import org.apache.nifi.distributed.cache.client.Serializer;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
@ -51,7 +50,6 @@ import org.apache.nifi.util.TestRunners;
import org.glassfish.jersey.internal.guava.Predicates;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.api.io.TempDir;
import java.io.File;
@ -66,7 +64,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate;
import java.util.regex.Matcher;
@ -79,34 +76,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestAbstractListProcessor {
/**
* @return current timestamp in milliseconds, but truncated at specified target precision (e.g. SECONDS or MINUTES).
*/
private static long getCurrentTimestampMillis(final TimeUnit targetPrecision) {
final long timestampInTargetPrecision = targetPrecision.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS);
return TimeUnit.MILLISECONDS.convert(timestampInTargetPrecision, targetPrecision);
}
private static long getSleepMillis(final TimeUnit targetPrecision) {
return AbstractListProcessor.LISTING_LAG_MILLIS.get(targetPrecision) * 2;
}
private ConcreteListProcessor proc;
private TestRunner runner;
@TempDir
private Path testFolder;
@RegisterExtension
private final ListProcessorTestWatcher dumpState = new ListProcessorTestWatcher(
() -> {
try {
return runner.getStateManager().getState(Scope.LOCAL).toMap();
} catch (IOException e) {
throw new RuntimeException("Failed to retrieve state", e);
}
},
() -> proc.getEntityList(),
() -> runner.getFlowFilesForRelationship(AbstractListProcessor.REL_SUCCESS).stream().map(m -> (FlowFile) m).collect(Collectors.toList())
);
@BeforeEach
public void setup() {
@ -168,16 +141,14 @@ public class TestAbstractListProcessor {
runner.run();
final MockStateManager stateManager = runner.getStateManager();
final Map<String, String> expectedState = new HashMap<>();
// Ensure only timestamp is migrated
expectedState.put(AbstractListProcessor.LATEST_LISTED_ENTRY_TIMESTAMP_KEY, "1492");
expectedState.put(AbstractListProcessor.LAST_PROCESSED_LATEST_ENTRY_TIMESTAMP_KEY, "1492");
final Map<String, String> expectedState = Map.of(
AbstractListProcessor.LATEST_LISTED_ENTRY_TIMESTAMP_KEY, "1492",
AbstractListProcessor.LAST_PROCESSED_LATEST_ENTRY_TIMESTAMP_KEY, "1492");
stateManager.assertStateEquals(expectedState, Scope.CLUSTER);
}
@Test
public void testNoStateToMigrate() throws Exception {
public void testNoStateToMigrate() {
runner.run();
final MockStateManager stateManager = runner.getStateManager();

View File

@ -17,6 +17,31 @@
package org.apache.nifi.avro;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Blob;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.apache.avro.Conversions;
import org.apache.avro.JsonProperties;
import org.apache.avro.LogicalType;
@ -54,31 +79,6 @@ import org.apache.nifi.serialization.record.util.IllegalTypeConversionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Blob;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
public class AvroTypeUtil {
private static final Logger logger = LoggerFactory.getLogger(AvroTypeUtil.class);
public static final String AVRO_SCHEMA_FORMAT = "avro";
@ -770,7 +770,7 @@ public class AvroTypeUtil {
// If the desired scale is different than this value's coerce scale.
final int desiredScale = decimalType.getScale();
final BigDecimal decimal = rawDecimal.scale() == desiredScale
? rawDecimal : rawDecimal.setScale(desiredScale, BigDecimal.ROUND_HALF_UP);
? rawDecimal : rawDecimal.setScale(desiredScale, RoundingMode.HALF_UP);
return fieldSchema.getType() == Type.BYTES
? new Conversions.DecimalConversion().toBytes(decimal, fieldSchema, logicalType) //return GenericByte
: new Conversions.DecimalConversion().toFixed(decimal, fieldSchema, logicalType); //return GenericFixed

View File

@ -18,6 +18,23 @@
package org.apache.nifi.avro;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Date;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.avro.Conversions;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
@ -42,24 +59,6 @@ import org.apache.nifi.serialization.record.type.RecordDataType;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Date;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Collectors;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@ -607,7 +606,7 @@ public class TestAvroTypeUtil {
final Schema.Field field = new Schema.Field("amount", fieldSchema, null, (Object)null);
// Create an overall record schema with the amount field
final Schema avroSchema = Schema.createRecord(Collections.singletonList(field));
final Schema avroSchema = Schema.createRecord(null, null, null, false, Collections.singletonList(field));
// Create an example Avro record with the amount field of type fixed and a logical type of decimal
final BigDecimal expectedBigDecimal = new BigDecimal("1234567890.12345678");
@ -629,7 +628,7 @@ public class TestAvroTypeUtil {
final Schema.Field field = new Schema.Field("amount", fieldSchema, null, (Object)null);
// Create an overall record schema with the amount field
final Schema avroSchema = Schema.createRecord(Collections.singletonList(field));
final Schema avroSchema = Schema.createRecord(null, null, null, false, Collections.singletonList(field));
// Create an example Avro record with the amount field of type binary and a logical type of decimal
final BigDecimal expectedBigDecimal = new BigDecimal("1234567890.12345678");
@ -771,16 +770,16 @@ public class TestAvroTypeUtil {
public void testMapToRecordConversion() {
final Charset charset = Charset.forName("UTF-8");
Object o = AvroTypeUtil.convertToAvroObject(Collections.singletonMap("Hello", "World"),
Schema.createRecord(Collections.singletonList(new Field("Hello", Schema.create(Type.STRING), "", ""))), charset);
Schema.createRecord(null, null, null, false, Collections.singletonList(new Field("Hello", Schema.create(Type.STRING), "", ""))), charset);
assertTrue(o instanceof Record);
assertEquals("World", ((Record) o).get("Hello"));
}
@Test
public void testListAndMapConversion() {
Schema s = Schema.createRecord(Arrays.asList(
new Field("List", Schema.createArray(Schema.createRecord(
Arrays.asList(
Schema s = Schema.createRecord(null, null, null, false, List.of(
new Field("List", Schema.createArray(Schema.createRecord(null, null, null, false,
List.of(
new Field("Message", Schema.create(Type.STRING), "", "")
)
)), "", null)
@ -1269,6 +1268,6 @@ public class TestAvroTypeUtil {
new Field("numbers", Schema.createMap(Schema.create(Type.LONG)), "", defaultLongMap)
);
return Schema.createRecord(avroFields);
return Schema.createRecord(null, null, null, false, avroFields);
}
}

View File

@ -17,7 +17,9 @@
package org.apache.nifi.csv;
import java.util.Map;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.DuplicateHeaderMode;
import org.apache.commons.csv.QuoteMode;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.nifi.components.AllowableValue;
@ -29,8 +31,6 @@ import org.apache.nifi.processor.util.StandardValidators;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
public class CSVUtils {
private static final Logger LOG = LoggerFactory.getLogger(CSVUtils.class);
@ -146,14 +146,12 @@ public class CSVUtils {
public static final PropertyDescriptor ALLOW_DUPLICATE_HEADER_NAMES = new PropertyDescriptor.Builder()
.name("csvutils-allow-duplicate-header-names")
.displayName("Allow Duplicate Header Names")
.description("Whether duplicate header names are allowed. Header names are case-sensitive, for example \"name\" and \"Name\" are treated as separate fields. " +
"Handling of duplicate header names is CSV Parser specific (where applicable):\n" +
"* Apache Commons CSV - duplicate headers will result in column data \"shifting\" right with new fields " +
"created for \"unknown_field_index_X\" where \"X\" is the CSV column index number\n" +
"* Jackson CSV - duplicate headers will be de-duplicated with the field value being that of the right-most " +
"duplicate CSV column\n" +
"* FastCSV - duplicate headers will be de-duplicated with the field value being that of the left-most " +
"duplicate CSV column")
.description("""
Whether duplicate header names are allowed. Header names are case-sensitive, for example "name" and "Name" are treated as separate fields.
Handling of duplicate header names is CSV Parser specific (where applicable):
* Apache Commons CSV - duplicate headers will result in column data "shifting" right with new fields created for "unknown_field_index_X" where "X" is the CSV column index number
* Jackson CSV - duplicate headers will be de-duplicated with the field value being that of the right-most duplicate CSV column
* FastCSV - duplicate headers will be de-duplicated with the field value being that of the left-most duplicate CSV column""")
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.allowableValues("true", "false")
.dependsOn(CSV_FORMAT, CUSTOM)
@ -275,57 +273,60 @@ public class CSVUtils {
private static CSVFormat buildCustomFormat(final PropertyContext context, final Map<String, String> variables) {
final Character valueSeparator = getValueSeparatorCharUnescapedJava(context, variables);
CSVFormat format = CSVFormat.newFormat(valueSeparator)
.withAllowMissingColumnNames()
.withIgnoreEmptyLines();
CSVFormat.Builder builder = CSVFormat.Builder.create()
.setDelimiter(valueSeparator)
.setAllowMissingColumnNames(true)
.setIgnoreEmptyLines(true);
final PropertyValue firstLineIsHeaderPropertyValue = context.getProperty(FIRST_LINE_IS_HEADER);
if (firstLineIsHeaderPropertyValue.getValue() != null && firstLineIsHeaderPropertyValue.asBoolean()) {
format = format.withFirstRecordAsHeader();
if (firstLineIsHeaderPropertyValue != null && firstLineIsHeaderPropertyValue.isSet() && firstLineIsHeaderPropertyValue.asBoolean()) {
builder = builder.setHeader().setSkipHeaderRecord(true);
}
final Character quoteChar = getCharUnescaped(context, QUOTE_CHAR, variables);
format = format.withQuote(quoteChar);
builder = builder.setQuote(quoteChar);
final Character escapeChar = context.getProperty(CSVUtils.ESCAPE_CHAR).evaluateAttributeExpressions(variables).getValue().isEmpty() ? null : getCharUnescaped(context, ESCAPE_CHAR, variables);
format = format.withEscape(escapeChar);
builder = builder.setEscape(escapeChar);
format = format.withTrim(context.getProperty(TRIM_FIELDS).asBoolean());
builder = builder.setTrim(context.getProperty(TRIM_FIELDS).asBoolean());
if (context.getProperty(COMMENT_MARKER).isSet()) {
final Character commentMarker = getCharUnescaped(context, COMMENT_MARKER, variables);
if (commentMarker != null) {
format = format.withCommentMarker(commentMarker);
builder = builder.setCommentMarker(commentMarker);
}
}
if (context.getProperty(NULL_STRING).isSet()) {
format = format.withNullString(unescape(context.getProperty(NULL_STRING).getValue()));
builder = builder.setNullString(unescape(context.getProperty(NULL_STRING).getValue()));
}
final PropertyValue quoteValue = context.getProperty(QUOTE_MODE);
if (quoteValue != null && quoteValue.isSet()) {
final QuoteMode quoteMode = QuoteMode.valueOf(quoteValue.getValue());
format = format.withQuoteMode(quoteMode);
builder = builder.setQuoteMode(quoteMode);
}
final PropertyValue trailingDelimiterValue = context.getProperty(TRAILING_DELIMITER);
if (trailingDelimiterValue != null && trailingDelimiterValue.isSet()) {
final boolean trailingDelimiter = trailingDelimiterValue.asBoolean();
format = format.withTrailingDelimiter(trailingDelimiter);
builder = builder.setTrailingDelimiter(trailingDelimiter);
}
final PropertyValue recordSeparator = context.getProperty(RECORD_SEPARATOR);
if (recordSeparator != null && recordSeparator.isSet()) {
final String separator = unescape(recordSeparator.getValue());
format = format.withRecordSeparator(separator);
builder = builder.setRecordSeparator(separator);
}
final PropertyValue allowDuplicateHeaderNames = context.getProperty(ALLOW_DUPLICATE_HEADER_NAMES);
if (allowDuplicateHeaderNames != null && allowDuplicateHeaderNames.isSet()) {
format = format.withAllowDuplicateHeaderNames(allowDuplicateHeaderNames.asBoolean());
final boolean allow = allowDuplicateHeaderNames.asBoolean();
final DuplicateHeaderMode mode = allow ? DuplicateHeaderMode.ALLOW_ALL : DuplicateHeaderMode.DISALLOW;
builder = builder.setDuplicateHeaderMode(mode);
}
return format;
return builder.build();
}
public static String unescape(String input) {

View File

@ -17,16 +17,16 @@
package org.apache.nifi.csv;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.DuplicateHeaderMode;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.context.PropertyContext;
import org.apache.nifi.util.MockConfigurationContext;
import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
@ -85,11 +85,11 @@ public class CSVUtilsTest {
CSVFormat csvFormat = CSVUtils.createCSVFormat(context, Collections.emptyMap());
assertEquals('|', csvFormat.getDelimiter());
assertEquals("|", csvFormat.getDelimiterString());
assertEquals('\'', (char) csvFormat.getQuoteCharacter());
assertEquals('^', (char) csvFormat.getEscapeCharacter());
assertEquals('~', (char) csvFormat.getCommentMarker());
assertTrue(csvFormat.getAllowDuplicateHeaderNames());
assertEquals(DuplicateHeaderMode.ALLOW_ALL, csvFormat.getDuplicateHeaderMode());
}
@Test
@ -104,11 +104,11 @@ public class CSVUtilsTest {
CSVFormat csvFormat = CSVUtils.createCSVFormat(context, attributes);
assertEquals('|', csvFormat.getDelimiter());
assertEquals("|", csvFormat.getDelimiterString());
assertEquals('\'', (char) csvFormat.getQuoteCharacter());
assertEquals('^', (char) csvFormat.getEscapeCharacter());
assertEquals('~', (char) csvFormat.getCommentMarker());
assertFalse(csvFormat.getAllowDuplicateHeaderNames());
assertEquals(DuplicateHeaderMode.DISALLOW, csvFormat.getDuplicateHeaderMode());
}
@Test
@ -117,7 +117,7 @@ public class CSVUtilsTest {
CSVFormat csvFormat = CSVUtils.createCSVFormat(context, Collections.emptyMap());
assertEquals(',', csvFormat.getDelimiter());
assertEquals(",", csvFormat.getDelimiterString());
assertEquals('"', (char) csvFormat.getQuoteCharacter());
assertNull(csvFormat.getEscapeCharacter());
assertNull(csvFormat.getCommentMarker());
@ -135,7 +135,7 @@ public class CSVUtilsTest {
CSVFormat csvFormat = CSVUtils.createCSVFormat(context, attributes);
assertEquals(',', csvFormat.getDelimiter());
assertEquals(",", csvFormat.getDelimiterString());
assertEquals('"', (char) csvFormat.getQuoteCharacter());
assertEquals('\\', (char) csvFormat.getEscapeCharacter());
assertNull(csvFormat.getCommentMarker());

View File

@ -16,6 +16,27 @@
*/
package org.apache.nifi.authorization;
import java.io.File;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.authorization.annotation.AuthorizerContext;
import org.apache.nifi.authorization.exception.AuthorizationAccessException;
@ -44,28 +65,6 @@ import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.xml.sax.SAXException;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import java.io.File;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Factory bean for loading the configured authorizer.
*/
@ -445,24 +444,18 @@ public class AuthorizerFactoryBean implements FactoryBean<Authorizer>, Disposabl
for (final Method method : authorizerClass.getMethods()) {
if (method.isAnnotationPresent(AuthorizerContext.class)) {
// make the method accessible
final boolean isAccessible = method.isAccessible();
method.setAccessible(true);
final Class<?>[] argumentTypes = method.getParameterTypes();
try {
final Class<?>[] argumentTypes = method.getParameterTypes();
// look for setters (single argument)
if (argumentTypes.length == 1) {
final Class<?> argumentType = argumentTypes[0];
// look for setters (single argument)
if (argumentTypes.length == 1) {
final Class<?> argumentType = argumentTypes[0];
// look for well known types
if (NiFiProperties.class.isAssignableFrom(argumentType)) {
// nifi properties injection
method.invoke(instance, properties);
}
// look for well known types
if (NiFiProperties.class.isAssignableFrom(argumentType)) {
// nifi properties injection
method.invoke(instance, properties);
}
} finally {
method.setAccessible(isAccessible);
}
}
}
@ -477,24 +470,18 @@ public class AuthorizerFactoryBean implements FactoryBean<Authorizer>, Disposabl
for (final Field field : authorizerClass.getDeclaredFields()) {
if (field.isAnnotationPresent(AuthorizerContext.class)) {
// make the method accessible
final boolean isAccessible = field.isAccessible();
field.setAccessible(true);
try {
// get the type
final Class<?> fieldType = field.getType();
// get the type
final Class<?> fieldType = field.getType();
// only consider this field if it isn't set yet
if (field.get(instance) == null) {
// look for well known types
if (NiFiProperties.class.isAssignableFrom(fieldType)) {
// nifi properties injection
field.set(instance, properties);
}
// only consider this field if it isn't set yet
if (field.get(instance) == null) {
// look for well known types
if (NiFiProperties.class.isAssignableFrom(fieldType)) {
// nifi properties injection
field.set(instance, properties);
}
} finally {
field.setAccessible(isAccessible);
}
}
}

View File

@ -17,11 +17,10 @@
package org.apache.nifi.web.api.entity;
import io.swagger.annotations.ApiModelProperty;
import org.apache.nifi.web.api.dto.PermissionsDTO;
import org.apache.nifi.web.api.dto.FlowAnalysisRuleDTO;
import org.apache.nifi.web.api.dto.status.FlowAnalysisRuleStatusDTO;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.nifi.web.api.dto.FlowAnalysisRuleDTO;
import org.apache.nifi.web.api.dto.PermissionsDTO;
import org.apache.nifi.web.api.dto.status.FlowAnalysisRuleStatusDTO;
/**
@ -49,9 +48,7 @@ public class FlowAnalysisRuleEntity extends ComponentEntity implements Permissib
/**
* @return The permissions for this component operations
*/
@ApiModelProperty(
value = "The permissions for this component operations."
)
@ApiModelProperty(value = "The permissions for this component operations.")
@Override
public PermissionsDTO getOperatePermissions() {
return operatePermissions;
@ -65,10 +62,7 @@ public class FlowAnalysisRuleEntity extends ComponentEntity implements Permissib
/**
* @return The status for this FlowAnalysisRule
*/
@ApiModelProperty(
value = "The status for this FlowAnalysisRule.",
readOnly = true
)
@ApiModelProperty(value = "The status for this FlowAnalysisRule.", accessMode = ApiModelProperty.AccessMode.READ_ONLY)
public FlowAnalysisRuleStatusDTO getStatus() {
return status;
}

View File

@ -16,6 +16,20 @@
*/
package org.apache.nifi.documentation.html;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.apache.nifi.annotation.behavior.DynamicProperties;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.InputRequirement;
@ -28,10 +42,10 @@ import org.apache.nifi.annotation.behavior.SystemResourceConsideration;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.DeprecationNotice;
import org.apache.nifi.annotation.documentation.MultiProcessorUseCase;
import org.apache.nifi.annotation.documentation.ProcessorConfiguration;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.documentation.UseCase;
import org.apache.nifi.annotation.documentation.ProcessorConfiguration;
import org.apache.nifi.bundle.Bundle;
import org.apache.nifi.bundle.BundleCoordinate;
import org.apache.nifi.components.AllowableValue;
@ -43,28 +57,12 @@ import org.apache.nifi.components.resource.ResourceDefinition;
import org.apache.nifi.components.resource.ResourceType;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.documentation.DocumentationWriter;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.nar.ExtensionDefinition;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* Generates HTML documentation for a ConfigurableComponent. This class is used
* to generate documentation for ControllerService, ParameterProvider, and ReportingTask because
@ -862,27 +860,12 @@ public class HtmlDocumentationWriter implements DocumentationWriter {
xmlStreamWriter.writeCharacters(dynamicProperty.description());
xmlStreamWriter.writeEmptyElement("br");
String text;
if (dynamicProperty.expressionLanguageScope().equals(ExpressionLanguageScope.NONE)) {
if (dynamicProperty.supportsExpressionLanguage()) {
text = "Supports Expression Language: true (undefined scope)";
} else {
text = "Supports Expression Language: false";
}
} else {
switch(dynamicProperty.expressionLanguageScope()) {
case FLOWFILE_ATTRIBUTES:
text = "Supports Expression Language: true (will be evaluated using flow file attributes and Environment variables)";
break;
case ENVIRONMENT:
text = "Supports Expression Language: true (will be evaluated using Environment variables only)";
break;
default:
text = "Supports Expression Language: false";
break;
}
}
final String text = switch (dynamicProperty.expressionLanguageScope()) {
case FLOWFILE_ATTRIBUTES -> "Supports Expression Language: true (will be evaluated using flow file attributes and Environment variables)";
case ENVIRONMENT -> "Supports Expression Language: true (will be evaluated using Environment variables only)";
default -> "Supports Expression Language: false";
};
writeSimpleElement(xmlStreamWriter, "strong", text);
xmlStreamWriter.writeEndElement();

View File

@ -21,6 +21,29 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonInclude.Value;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import okhttp3.Call;
import okhttp3.ConnectionPool;
import okhttp3.Headers;
@ -43,30 +66,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StreamUtils;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
public class OkHttpReplicationClient implements HttpReplicationClient {
private static final Logger logger = LoggerFactory.getLogger(OkHttpReplicationClient.class);
private static final Set<String> gzipEncodings = Stream.of("gzip", "x-gzip").collect(Collectors.toSet());
@ -241,7 +240,7 @@ public class OkHttpReplicationClient implements HttpReplicationClient {
final byte[] serialized = serializeEntity(entity, contentType, gzip);
final MediaType mediaType = MediaType.parse(contentType);
return RequestBody.create(mediaType, serialized);
return RequestBody.create(serialized, mediaType);
}
private String getContentType(final Map<String, String> headers, final String defaultValue) {

View File

@ -16,6 +16,35 @@
*/
package org.apache.nifi.controller;
import java.lang.management.ThreadInfo;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.Restricted;
@ -89,36 +118,6 @@ import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.management.ThreadInfo;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.Objects.requireNonNull;
/**
@ -1614,7 +1613,7 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
final ActiveTask activeTask = entry.getValue();
final Long timestamp = activeTask.getStartTime();
final long activeMillis = now - timestamp;
final ThreadInfo threadInfo = threadInfoMap.get(thread.getId());
final ThreadInfo threadInfo = threadInfoMap.get(thread.threadId());
final String stackTrace = ThreadUtils.createStackTrace(threadInfo, threadDetails.getDeadlockedThreadIds(), threadDetails.getMonitorDeadlockThreadIds());

View File

@ -16,6 +16,40 @@
*/
package org.apache.nifi.controller.repository;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.components.state.StateMap;
@ -70,41 +104,6 @@ import org.apache.nifi.util.FormatUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* <p>
* Provides a ProcessSession that ensures all accesses, changes and transfers
@ -2657,12 +2656,7 @@ public class StandardProcessSession implements ProcessSession, ProvenanceEventEn
}
@Override
public void read(final FlowFile source, final InputStreamCallback reader) {
read(source, true, reader);
}
@Override
public void read(FlowFile source, boolean allowSessionStreamManagement, InputStreamCallback reader) {
public void read(FlowFile source, final InputStreamCallback reader) {
verifyTaskActive();
source = validateRecordState(source, true);
@ -2691,12 +2685,6 @@ public class StandardProcessSession implements ProcessSession, ProvenanceEventEn
try {
incrementReadCount(source);
reader.process(createTaskTerminationStream(ffais));
// Allow processors to close the file after reading to avoid too many files open or do smart session stream management.
if (rawIn == currentReadClaimStream && !allowSessionStreamManagement) {
currentReadClaimStream.close();
currentReadClaimStream = null;
}
} catch (final ContentNotFoundException cnfe) {
cnfeThrown = true;
throw cnfe;

View File

@ -16,6 +16,14 @@
*/
package org.apache.nifi.controller.repository.scheduling;
import java.time.Duration;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.resource.ResourceReference;
@ -34,14 +42,6 @@ import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.scheduling.ExecutionNode;
import org.apache.nifi.util.Connectables;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
* This class is essentially an empty shell for {@link Connectable}s that are not Processors
*/
@ -101,6 +101,11 @@ public class ConnectableProcessContext implements ProcessContext {
return null;
}
@Override
public Duration asDuration() {
return null;
}
@Override
public Double asDataSize(final DataUnit dataUnit) {
return null;

View File

@ -20,11 +20,9 @@ package org.apache.nifi.controller.state;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import org.apache.nifi.components.state.StateMap;
public class StandardStateMap implements StateMap {
private static final int EMPTY_VERSION = -1;
private final Map<String, String> stateValues;
@ -35,11 +33,6 @@ public class StandardStateMap implements StateMap {
this.stateVersion = stateVersion;
}
@Override
public long getVersion() {
return stateVersion.map(version -> version.hashCode()).orElse(EMPTY_VERSION);
}
@Override
public Optional<String> getStateVersion() {
return stateVersion;

View File

@ -17,6 +17,20 @@
package org.apache.nifi.controller.state.manager;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.net.ssl.SSLContext;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.nifi.attribute.expression.language.Query;
import org.apache.nifi.attribute.expression.language.StandardPropertyValue;
@ -44,6 +58,7 @@ import org.apache.nifi.controller.state.StandardStateProviderInitializationConte
import org.apache.nifi.controller.state.config.StateManagerConfiguration;
import org.apache.nifi.controller.state.config.StateProviderConfiguration;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.logging.StandardLoggingContext;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.nar.NarCloseable;
import org.apache.nifi.parameter.ExpressionLanguageAwareParameterParser;
@ -51,7 +66,6 @@ import org.apache.nifi.parameter.ParameterLookup;
import org.apache.nifi.parameter.ParameterParser;
import org.apache.nifi.parameter.ParameterTokenList;
import org.apache.nifi.processor.SimpleProcessLogger;
import org.apache.nifi.logging.StandardLoggingContext;
import org.apache.nifi.processor.StandardValidationContext;
import org.apache.nifi.security.util.SslContextFactory;
import org.apache.nifi.security.util.StandardTlsConfiguration;
@ -61,21 +75,6 @@ import org.apache.nifi.util.NiFiProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLContext;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
public class StandardStateManagerProvider implements StateManagerProvider {
private static final Logger logger = LoggerFactory.getLogger(StandardStateManagerProvider.class);
@ -370,24 +369,18 @@ public class StandardStateManagerProvider implements StateManagerProvider {
for (final Method method : stateProviderClass.getMethods()) {
if (method.isAnnotationPresent(StateProviderContext.class)) {
// make the method accessible
final boolean isAccessible = method.isAccessible();
method.setAccessible(true);
final Class<?>[] argumentTypes = method.getParameterTypes();
try {
final Class<?>[] argumentTypes = method.getParameterTypes();
// look for setters (single argument)
if (argumentTypes.length == 1) {
final Class<?> argumentType = argumentTypes[0];
// look for setters (single argument)
if (argumentTypes.length == 1) {
final Class<?> argumentType = argumentTypes[0];
// look for well known types
if (NiFiProperties.class.isAssignableFrom(argumentType)) {
// nifi properties injection
method.invoke(instance, nifiProperties);
}
// look for well known types
if (NiFiProperties.class.isAssignableFrom(argumentType)) {
// nifi properties injection
method.invoke(instance, nifiProperties);
}
} finally {
method.setAccessible(isAccessible);
}
}
}

View File

@ -16,10 +16,9 @@
*/
package org.apache.nifi.logging;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.slf4j.LoggerFactory;
import static java.util.Objects.requireNonNull;
@ -49,7 +48,7 @@ public class LogRepositoryFactory {
LogRepository repository = repositoryMap.get(requireNonNull(componentId));
if (repository == null) {
try {
repository = logRepositoryClass.newInstance();
repository = logRepositoryClass.getDeclaredConstructor().newInstance();
} catch (final Exception e) {
throw new RuntimeException(e);
}

View File

@ -16,6 +16,16 @@
*/
package org.apache.nifi.controller;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;
import java.net.URL;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.net.ssl.SSLContext;
import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.RequiresInstanceClassLoading;
@ -84,16 +94,6 @@ import org.apache.nifi.validation.RuleViolationsManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLContext;
import java.lang.reflect.Proxy;
import java.net.URL;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
public class ExtensionBuilder {
private static final Logger logger = LoggerFactory.getLogger(ExtensionBuilder.class);
@ -603,7 +603,8 @@ public class ExtensionBuilder {
}
private ControllerServiceNode createControllerServiceNode(final StandardLoggingContext loggingContext)
throws ClassNotFoundException, IllegalAccessException, InstantiationException, InitializationException {
throws ClassNotFoundException, IllegalAccessException, InstantiationException, InitializationException, NoSuchMethodException, InvocationTargetException {
final ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader();
try {
final Bundle bundle = extensionManager.getBundle(bundleCoordinate);
@ -616,7 +617,7 @@ public class ExtensionBuilder {
Thread.currentThread().setContextClassLoader(detectedClassLoader);
final Class<? extends ControllerService> controllerServiceClass = rawClass.asSubclass(ControllerService.class);
final ControllerService serviceImpl = controllerServiceClass.newInstance();
final ControllerService serviceImpl = controllerServiceClass.getDeclaredConstructor().newInstance();
final StandardControllerServiceInvocationHandler invocationHandler = new StandardControllerServiceInvocationHandler(extensionManager, serviceImpl);
@ -894,7 +895,8 @@ public class ExtensionBuilder {
}
private <T extends ConfigurableComponent> LoggableComponent<T> createLoggableComponent(Class<T> nodeType, LoggingContext loggingContext)
throws ClassNotFoundException, IllegalAccessException, InstantiationException {
throws ClassNotFoundException, IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException {
final ClassLoader ctxClassLoader = Thread.currentThread().getContextClassLoader();
try {
final Bundle bundle = extensionManager.getBundle(bundleCoordinate);
@ -907,7 +909,7 @@ public class ExtensionBuilder {
final Class<?> rawClass = Class.forName(type, true, detectedClassLoader);
Thread.currentThread().setContextClassLoader(detectedClassLoader);
final Object extensionInstance = rawClass.newInstance();
final Object extensionInstance = rawClass.getDeclaredConstructor().newInstance();
final ComponentLog componentLog = new SimpleProcessLogger(identifier, extensionInstance, loggingContext);
final TerminationAwareLogger terminationAwareLogger = new TerminationAwareLogger(componentLog);

View File

@ -16,6 +16,21 @@
*/
package org.apache.nifi.controller.flow;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import javax.net.ssl.SSLContext;
import org.apache.nifi.annotation.documentation.DeprecationNotice;
import org.apache.nifi.annotation.lifecycle.OnAdded;
import org.apache.nifi.annotation.lifecycle.OnConfigurationRestored;
@ -93,21 +108,6 @@ import org.apache.nifi.web.api.dto.FlowSnippetDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLContext;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import static java.util.Objects.requireNonNull;
public class StandardFlowManager extends AbstractFlowManager implements FlowManager {
@ -314,7 +314,13 @@ public class StandardFlowManager extends AbstractFlowManager implements FlowMana
Thread.currentThread().setContextClassLoader(detectedClassLoaderForType);
final Class<? extends FlowFilePrioritizer> prioritizerClass = rawClass.asSubclass(FlowFilePrioritizer.class);
final Object processorObj = prioritizerClass.newInstance();
final Object processorObj;
try {
processorObj = prioritizerClass.getDeclaredConstructor().newInstance();
} catch (final InvocationTargetException | NoSuchMethodException e) {
throw new ClassNotFoundException("Could not find class or default no-arg constructor for " + type, e);
}
prioritizer = prioritizerClass.cast(processorObj);
return prioritizer;

View File

@ -16,6 +16,16 @@
*/
package org.apache.nifi.controller.repository;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.controller.queue.QueueSize;
@ -31,17 +41,6 @@ import org.apache.nifi.provenance.ProvenanceReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.regex.Pattern;
public class BatchingSessionFactory implements ProcessSessionFactory {
private static final Logger logger = LoggerFactory.getLogger(BatchingSessionFactory.class);
@ -226,11 +225,6 @@ public class BatchingSessionFactory implements ProcessSessionFactory {
session.read(source, reader);
}
@Override
public void read(FlowFile source, boolean allowSessionStreamManagement, InputStreamCallback reader) {
session.read(source, allowSessionStreamManagement, reader);
}
@Override
public InputStream read(FlowFile flowFile) {
return session.read(flowFile);

View File

@ -16,27 +16,6 @@
*/
package org.apache.nifi.controller.repository;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaimManager;
import org.apache.nifi.controller.repository.claim.StandardContentClaim;
import org.apache.nifi.controller.repository.io.ContentClaimOutputStream;
import org.apache.nifi.controller.repository.io.LimitedInputStream;
import org.apache.nifi.engine.FlowEngine;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.reporting.Severity;
import org.apache.nifi.stream.io.ByteCountingOutputStream;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.stream.io.SynchronizedByteCountingOutputStream;
import org.apache.nifi.util.FormatUtils;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.util.StopWatch;
import org.apache.nifi.util.file.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.EOFException;
@ -55,7 +34,6 @@ import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
@ -79,6 +57,26 @@ import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaimManager;
import org.apache.nifi.controller.repository.claim.StandardContentClaim;
import org.apache.nifi.controller.repository.io.ContentClaimOutputStream;
import org.apache.nifi.controller.repository.io.LimitedInputStream;
import org.apache.nifi.engine.FlowEngine;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.reporting.Severity;
import org.apache.nifi.stream.io.ByteCountingOutputStream;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.stream.io.SynchronizedByteCountingOutputStream;
import org.apache.nifi.util.FormatUtils;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.util.StopWatch;
import org.apache.nifi.util.file.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Is thread safe
@ -786,35 +784,6 @@ public class FileSystemRepository implements ContentRepository {
return newClaim;
}
@Override
public long merge(final Collection<ContentClaim> claims, final ContentClaim destination, final byte[] header, final byte[] footer, final byte[] demarcator) throws IOException {
if (claims.contains(destination)) {
throw new IllegalArgumentException("destination cannot be within claims");
}
try (final ByteCountingOutputStream out = new ByteCountingOutputStream(write(destination))) {
if (header != null) {
out.write(header);
}
int i = 0;
for (final ContentClaim claim : claims) {
try (final InputStream in = read(claim)) {
StreamUtils.copy(in, out);
}
if (++i < claims.size() && demarcator != null) {
out.write(demarcator);
}
}
if (footer != null) {
out.write(footer);
}
return out.getBytesWritten();
}
}
@Override
public long importFrom(final Path content, final ContentClaim claim) throws IOException {

View File

@ -17,15 +17,13 @@
package org.apache.nifi.controller.repository;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Set;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
public class NonPurgeableContentRepository implements ContentRepository {
private final ContentRepository delegate;
@ -94,12 +92,6 @@ public class NonPurgeableContentRepository implements ContentRepository {
return delegate.clone(original, lossTolerant);
}
@Override
@Deprecated
public long merge(final Collection<ContentClaim> claims, final ContentClaim destination, final byte[] header, final byte[] footer, final byte[] demarcator) throws IOException {
return delegate.merge(claims, destination, header, footer, demarcator);
}
@Override
public long importFrom(final Path content, final ContentClaim claim) throws IOException {
return delegate.importFrom(content, claim);

View File

@ -17,6 +17,26 @@
package org.apache.nifi.controller.serialization;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.nifi.authorization.Authorizer;
import org.apache.nifi.authorization.AuthorizerCapabilityDetection;
@ -41,7 +61,6 @@ import org.apache.nifi.controller.flow.FlowManager;
import org.apache.nifi.controller.flow.VersionedDataflow;
import org.apache.nifi.controller.flow.VersionedFlowEncodingVersion;
import org.apache.nifi.controller.flowanalysis.FlowAnalysisRuleInstantiationException;
import org.apache.nifi.flow.VersionedFlowAnalysisRule;
import org.apache.nifi.controller.inheritance.AuthorizerCheck;
import org.apache.nifi.controller.inheritance.BundleCompatibilityCheck;
import org.apache.nifi.controller.inheritance.ConnectionMissingCheck;
@ -58,6 +77,7 @@ import org.apache.nifi.flow.VersionedComponent;
import org.apache.nifi.flow.VersionedConfigurableExtension;
import org.apache.nifi.flow.VersionedControllerService;
import org.apache.nifi.flow.VersionedExternalFlow;
import org.apache.nifi.flow.VersionedFlowAnalysisRule;
import org.apache.nifi.flow.VersionedFlowRegistryClient;
import org.apache.nifi.flow.VersionedParameter;
import org.apache.nifi.flow.VersionedParameterContext;
@ -104,27 +124,6 @@ import org.apache.nifi.web.api.dto.BundleDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;
public class VersionedFlowSynchronizer implements FlowSynchronizer {
private static final Logger logger = LoggerFactory.getLogger(VersionedFlowSynchronizer.class);
/**
@ -564,7 +563,7 @@ public class VersionedFlowSynchronizer implements FlowSynchronizer {
* @return <code>true</code> if the client is from an older configuration, <code>false</code> otherwise.
*/
private boolean isOldStyleRegistryClient(final VersionedFlowRegistryClient client) {
return client.getId() != null && client.getIdentifier() == null && client.getBundle() == null;
return client.getIdentifier() != null && client.getIdentifier() == null && client.getBundle() == null;
}
/**
@ -579,7 +578,7 @@ public class VersionedFlowSynchronizer implements FlowSynchronizer {
chosenCoordinate = DEPRECATED_FLOW_REGISTRY_BUNDLE;
}
final FlowRegistryClientNode flowRegistryClient = flowManager.createFlowRegistryClient(DEPRECATED_FLOW_REGISTRY_CLIENT_TYPE, client.getId(),
final FlowRegistryClientNode flowRegistryClient = flowManager.createFlowRegistryClient(DEPRECATED_FLOW_REGISTRY_CLIENT_TYPE, client.getIdentifier(),
chosenCoordinate, Collections.emptySet(), false,true, null);
flowRegistryClient.setName(client.getName());

View File

@ -16,6 +16,9 @@
*/
package org.apache.nifi.spring;
import java.lang.reflect.InvocationTargetException;
import java.util.Optional;
import java.util.Set;
import org.apache.nifi.controller.leader.election.LeaderElectionManager;
import org.apache.nifi.controller.leader.election.StandaloneLeaderElectionManager;
import org.apache.nifi.nar.ExtensionDefinition;
@ -24,9 +27,6 @@ import org.apache.nifi.nar.NarThreadContextClassLoader;
import org.apache.nifi.util.NiFiProperties;
import org.springframework.beans.factory.FactoryBean;
import java.util.Optional;
import java.util.Set;
import static org.apache.nifi.util.NiFiProperties.CLUSTER_LEADER_ELECTION_IMPLEMENTATION;
import static org.apache.nifi.util.NiFiProperties.DEFAULT_CLUSTER_LEADER_ELECTION_IMPLEMENTATION;
@ -63,7 +63,7 @@ public class LeaderElectionManagerFactoryBean implements FactoryBean<LeaderElect
this.properties = properties;
}
private LeaderElectionManager loadClusterLeaderElectionManager() throws ClassNotFoundException, InstantiationException, IllegalAccessException {
private LeaderElectionManager loadClusterLeaderElectionManager() throws ClassNotFoundException, InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException {
final String leaderElectionImplementation = properties.getProperty(CLUSTER_LEADER_ELECTION_IMPLEMENTATION, DEFAULT_CLUSTER_LEADER_ELECTION_IMPLEMENTATION);
final Set<ExtensionDefinition> extensions = extensionManager.getExtensions(LeaderElectionManager.class);
final Optional<ExtensionDefinition> extensionFound = extensions.stream()

View File

@ -16,6 +16,39 @@
*/
package org.apache.nifi.controller.repository;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.io.IOUtils;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
@ -69,40 +102,6 @@ import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
@ -1299,12 +1298,7 @@ public class StandardProcessSessionIT {
final FlowFile flowFile = session.get();
assertNotNull(flowFile);
final AtomicReference<InputStream> inputStreamHolder = new AtomicReference<>(null);
session.read(flowFile, true, new InputStreamCallback() {
@Override
public void process(final InputStream inputStream) throws IOException {
inputStreamHolder.set(inputStream);
}
});
session.read(flowFile, inputStreamHolder::set);
assertDisabled(inputStreamHolder.get());
}
@ -1796,12 +1790,7 @@ public class StandardProcessSessionIT {
});
try {
standardProcessSessions[i].read(flowFile, false, new InputStreamCallback() {
@Override
public void process(final InputStream in) throws IOException {
StreamUtils.fillBuffer(in, buff);
}
});
standardProcessSessions[i].read(flowFile, in -> StreamUtils.fillBuffer(in, buff));
} catch (Exception e) {
System.out.println("Failed at file:" + i);
throw e;
@ -3379,10 +3368,6 @@ public class StandardProcessSessionIT {
return null;
}
@Override
public long merge(Collection<ContentClaim> claims, ContentClaim destination, byte[] header, byte[] footer, byte[] demarcator) throws IOException {
return 0;
}
private Path getPath(final ContentClaim contentClaim) {
final ResourceClaim claim = contentClaim.getResourceClaim();

View File

@ -16,24 +16,6 @@
*/
package org.apache.nifi.controller.repository;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import org.apache.nifi.controller.repository.claim.StandardContentClaim;
import org.apache.nifi.controller.repository.claim.StandardResourceClaim;
import org.apache.nifi.controller.repository.claim.StandardResourceClaimManager;
import org.apache.nifi.controller.repository.util.DiskUtils;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.util.NiFiProperties;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.OS;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
@ -57,6 +39,23 @@ import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import org.apache.nifi.controller.repository.claim.StandardContentClaim;
import org.apache.nifi.controller.repository.claim.StandardResourceClaim;
import org.apache.nifi.controller.repository.claim.StandardResourceClaimManager;
import org.apache.nifi.controller.repository.util.DiskUtils;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.util.NiFiProperties;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.OS;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ -893,87 +892,6 @@ public class TestFileSystemRepository {
}
}
@Test
public void testMergeWithHeaderFooterDemarcator() throws IOException {
testMerge("HEADER", "FOOTER", "DEMARCATOR");
}
@Test
public void testMergeWithHeaderFooter() throws IOException {
testMerge("HEADER", "FOOTER", null);
}
@Test
public void testMergeWithHeaderOnly() throws IOException {
testMerge("HEADER", null, null);
}
@Test
public void testMergeWithFooterOnly() throws IOException {
testMerge(null, "FOOTER", null);
}
@Test
public void testMergeWithDemarcator() throws IOException {
testMerge(null, null, "DEMARCATOR");
}
@Test
public void testWithHeaderDemarcator() throws IOException {
testMerge("HEADER", null, "DEMARCATOR");
}
@Test
public void testMergeWithFooterDemarcator() throws IOException {
testMerge(null, "FOOTER", "DEMARCATOR");
}
@Test
public void testMergeWithoutHeaderFooterDemarcator() throws IOException {
testMerge(null, null, null);
}
private void testMerge(final String header, final String footer, final String demarcator) throws IOException {
final int count = 4;
final String content = "The quick brown fox jumps over the lazy dog";
final List<ContentClaim> claims = new ArrayList<>();
for (int i = 0; i < count; i++) {
final ContentClaim claim = repository.create(true);
claims.add(claim);
try (final OutputStream out = repository.write(claim)) {
out.write(content.getBytes());
}
}
final ContentClaim destination = repository.create(true);
final byte[] headerBytes = header == null ? null : header.getBytes();
final byte[] footerBytes = footer == null ? null : footer.getBytes();
final byte[] demarcatorBytes = demarcator == null ? null : demarcator.getBytes();
repository.merge(claims, destination, headerBytes, footerBytes, demarcatorBytes);
final StringBuilder sb = new StringBuilder();
if (header != null) {
sb.append(header);
}
for (int i = 0; i < count; i++) {
sb.append(content);
if (demarcator != null && i != count - 1) {
sb.append(demarcator);
}
}
if (footer != null) {
sb.append(footer);
}
final String expectedText = sb.toString();
final byte[] expected = expectedText.getBytes();
final ByteArrayOutputStream baos = new ByteArrayOutputStream((int) destination.getLength());
try (final InputStream in = repository.read(destination)) {
StreamUtils.copy(in, baos);
}
final byte[] actual = baos.toByteArray();
assertArrayEquals(expected, actual);
}
private byte[] readFully(final InputStream inStream, final int size) throws IOException {
final ByteArrayOutputStream baos = new ByteArrayOutputStream(size);

View File

@ -17,11 +17,6 @@
package org.apache.nifi.controller.state.providers;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
@ -29,11 +24,15 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.components.state.StateProvider;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* <p>
@ -183,8 +182,8 @@ public abstract class AbstractTestStateProvider {
final StateProvider provider = getProvider();
final StateMap stateMap = new StateMap() {
@Override
public long getVersion() {
return 4;
public Optional<String> getStateVersion() {
return Optional.of("4");
}
@Override

View File

@ -24,15 +24,6 @@ import io.fabric8.kubernetes.api.model.StatusDetails;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.dsl.Resource;
import org.apache.nifi.components.AbstractConfigurableComponent;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.components.state.StateProvider;
import org.apache.nifi.components.state.StateProviderInitializationContext;
import org.apache.nifi.kubernetes.client.ServiceAccountNamespaceProvider;
import org.apache.nifi.kubernetes.client.StandardKubernetesClientProvider;
import org.apache.nifi.logging.ComponentLog;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.nio.charset.Charset;
@ -48,11 +39,20 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.nifi.components.AbstractConfigurableComponent;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.components.state.StateProvider;
import org.apache.nifi.components.state.StateProviderInitializationContext;
import org.apache.nifi.kubernetes.client.ServiceAccountNamespaceProvider;
import org.apache.nifi.kubernetes.client.StandardKubernetesClientProvider;
import org.apache.nifi.logging.ComponentLog;
/**
* State Provider implementation based on Kubernetes ConfigMaps with Base64 encoded keys to meet Kubernetes constraints
*/
public class KubernetesConfigMapStateProvider extends AbstractConfigurableComponent implements StateProvider {
private static final int MAX_UPDATE_ATTEMPTS = 5;
private static final Scope[] SUPPORTED_SCOPES = { Scope.CLUSTER };
private static final Charset KEY_CHARACTER_SET = StandardCharsets.UTF_8;
@ -121,17 +121,65 @@ public class KubernetesConfigMapStateProvider extends AbstractConfigurableCompon
public void setState(final Map<String, String> state, final String componentId) throws IOException {
try {
final ConfigMap configMap = createConfigMapBuilder(state, componentId).build();
final ConfigMap configMapCreated = kubernetesClient.configMaps().resource(configMap).createOrReplace();
final Resource<ConfigMap> configMapResource = kubernetesClient.configMaps().resource(configMap);
ConfigMap configMapCreated = null;
// Attempt to create or update, up to 3 times. We expect that we will update more frequently than create
// so we first attempt to update. If we get back a 404, then we create it.
boolean create = false;
for (int attempt = 0; attempt < MAX_UPDATE_ATTEMPTS; attempt++) {
try {
if (create) {
configMapCreated = configMapResource.create();
} else {
configMapCreated = configMapResource.update();
}
break;
} catch (final KubernetesClientException e) {
final int returnCode = e.getCode();
if (returnCode == 404) {
// A 404 return code indicates that we need to create the resource instead of update it.
// Now, we will attempt to create the resource instead of update it, so we'll reset the attempt counter.
attempt = 0;
create = true;
continue;
}
if (returnCode >= 500) {
// Server-side error. We should retry, up to some number of attempts.
if (attempt == MAX_UPDATE_ATTEMPTS - 1) {
throw e;
}
} else {
// There's an issue with the request. Throw the Exception.
throw e;
}
} catch (final Exception e) {
if (attempt < MAX_UPDATE_ATTEMPTS - 1) {
logger.warn("Failed to update state for component with ID {}. Will attempt to update the resource again.", componentId, e);
} else {
logger.error("Failed to update state for component with ID {}", componentId, e);
throw e;
}
}
}
if (configMapCreated == null) {
throw new IOException("Exhausted maximum number of attempts (%s) to update state for component with ID %s but could not update it".formatted(MAX_UPDATE_ATTEMPTS, componentId));
}
final Optional<String> version = getVersion(configMapCreated);
logger.debug("Set State Component ID [{}] Version [{}]", componentId, version);
} catch (final KubernetesClientException e) {
if (isNotFound(e.getCode())) {
logger.debug("State not found for Component ID [{}]", componentId, e);
} else {
throw new IOException(String.format("Set failed for Component ID [%s]", componentId), e);
throw new IOException(String.format("Failed to update state for Component with ID [%s]", componentId), e);
}
} catch (final RuntimeException e) {
throw new IOException(String.format("Set failed for Component ID [%s]", componentId), e);
throw new IOException(String.format("Failed to update state for Component with ID [%s]", componentId), e);
}
}

View File

@ -16,11 +16,10 @@
*/
package org.apache.nifi.kubernetes.state.provider;
import org.apache.nifi.components.state.StateMap;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import org.apache.nifi.components.state.StateMap;
/**
* Standard implementation of StateMap
@ -37,15 +36,6 @@ class StandardStateMap implements StateMap {
this.version = version;
}
/**
* Get Version returns String.hashCode() or -1 on empty for compatibility
*
* @return Version
*/
@Override
public long getVersion() {
return version.map(stateVersion -> stateVersion.hashCode()).orElse(EMPTY_VERSION);
}
/**
* Get State Version

View File

@ -16,12 +16,6 @@
*/
package org.apache.nifi.nar;
import org.apache.nifi.bundle.Bundle;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
import org.junit.jupiter.api.condition.EnabledOnOs;
import org.junit.jupiter.api.condition.OS;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
@ -32,6 +26,11 @@ import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.nifi.bundle.Bundle;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
import org.junit.jupiter.api.condition.EnabledOnOs;
import org.junit.jupiter.api.condition.OS;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.hasItem;
@ -77,7 +76,7 @@ public class TestLoadNativeLibFromNar extends AbstractTestNarLoader {
Object actualJniMethodReturnValue = TestJNI
.getMethod("testJniMethod")
.invoke(TestJNI.newInstance());
.invoke(TestJNI.getDeclaredConstructor().newInstance());
assertEquals("calledNativeTestJniMethod", actualJniMethodReturnValue);
}
@ -119,7 +118,7 @@ public class TestLoadNativeLibFromNar extends AbstractTestNarLoader {
Object actualJniMethodReturnValue = TestJNI
.getMethod("testJniMethod")
.invoke(TestJNI.newInstance());
.invoke(TestJNI.getDeclaredConstructor().newInstance());
assertThat(actualLibraryLocation, containsString(instanceClassLoader.getIdentifier()));
assertEquals("calledNativeTestJniMethod", actualJniMethodReturnValue);

View File

@ -16,14 +16,6 @@
*/
package org.apache.nifi.nar;
import org.apache.nifi.bundle.Bundle;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
import org.junit.jupiter.api.condition.EnabledOnOs;
import org.junit.jupiter.api.condition.OS;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
@ -34,6 +26,13 @@ import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.nifi.bundle.Bundle;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
import org.junit.jupiter.api.condition.EnabledOnOs;
import org.junit.jupiter.api.condition.OS;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.hasItem;
@ -97,7 +96,7 @@ public class TestLoadNativeLibViaSystemProperty extends AbstractTestNarLoader {
Object actualJniMethodReturnValue = TestJNI
.getMethod("testJniMethod")
.invoke(TestJNI.newInstance());
.invoke(TestJNI.getDeclaredConstructor().newInstance());
assertEquals("calledNativeTestJniMethod", actualJniMethodReturnValue);
}
@ -139,7 +138,7 @@ public class TestLoadNativeLibViaSystemProperty extends AbstractTestNarLoader {
Object actualJniMethodReturnValue = TestJNI
.getMethod("testJniMethod")
.invoke(TestJNI.newInstance());
.invoke(TestJNI.getDeclaredConstructor().newInstance());
assertThat(actualLibraryLocation, containsString(instanceClassLoader.getIdentifier()));
assertEquals("calledNativeTestJniMethod", actualJniMethodReturnValue);

View File

@ -16,13 +16,12 @@
*/
package org.apache.nifi.init;
import org.apache.nifi.logging.ComponentLog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.apache.nifi.logging.ComponentLog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is a copy of org.apache.nifi.util.ReflectionUtils. Ultimately the
@ -54,76 +53,64 @@ public class ReflectionUtils {
* methods threw an Exception or could not be invoked; if <code>false</code>
* is returned, an error will have been logged.
*/
public static boolean quietlyInvokeMethodsWithAnnotation(
final Class<? extends Annotation> annotation, final Object instance, final ComponentLog logger, final Object... args) {
public static boolean quietlyInvokeMethodsWithAnnotation(final Class<? extends Annotation> annotation, final Object instance, final ComponentLog logger, final Object... args) {
for (final Method method : instance.getClass().getMethods()) {
if (method.isAnnotationPresent(annotation)) {
final boolean isAccessible = method.isAccessible();
method.setAccessible(true);
final Class<?>[] argumentTypes = method.getParameterTypes();
if (argumentTypes.length > args.length) {
if (logger == null) {
LOG.error("Unable to invoke method {} on {} because method expects {} parameters but only {} were given",
new Object[]{method.getName(), instance, argumentTypes.length, args.length});
} else {
logger.error("Unable to invoke method {} on {} because method expects {} parameters but only {} were given",
new Object[]{method.getName(), instance, argumentTypes.length, args.length});
}
return false;
}
for (int i = 0; i < argumentTypes.length; i++) {
final Class<?> argType = argumentTypes[i];
if (!argType.isAssignableFrom(args[i].getClass())) {
if (logger == null) {
LOG.error("Unable to invoke method {} on {} because method parameter {} is expected to be of type {} but argument passed was of type {}",
new Object[]{method.getName(), instance, i, argType, args[i].getClass()});
} else {
logger.error("Unable to invoke method {} on {} because method parameter {} is expected to be of type {} but argument passed was of type {}",
new Object[]{method.getName(), instance, i, argType, args[i].getClass()});
}
return false;
}
}
try {
final Class<?>[] argumentTypes = method.getParameterTypes();
if (argumentTypes.length > args.length) {
if (logger == null) {
LOG.error("Unable to invoke method {} on {} because method expects {} parameters but only {} were given",
new Object[]{method.getName(), instance, argumentTypes.length, args.length});
} else {
logger.error("Unable to invoke method {} on {} because method expects {} parameters but only {} were given",
new Object[]{method.getName(), instance, argumentTypes.length, args.length});
}
if (argumentTypes.length == args.length) {
method.invoke(instance, args);
} else {
final Object[] argsToPass = new Object[argumentTypes.length];
System.arraycopy(args, 0, argsToPass, 0, argsToPass.length);
return false;
method.invoke(instance, argsToPass);
}
} catch (final InvocationTargetException ite) {
if (logger == null) {
LOG.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, ite.getCause()});
LOG.error("", ite.getCause());
} else {
logger.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, ite.getCause()});
}
} catch (final IllegalAccessException | IllegalArgumentException t) {
if (logger == null) {
LOG.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, t});
LOG.error("", t);
} else {
logger.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, t});
}
for (int i = 0; i < argumentTypes.length; i++) {
final Class<?> argType = argumentTypes[i];
if (!argType.isAssignableFrom(args[i].getClass())) {
if (logger == null) {
LOG.error("Unable to invoke method {} on {} because method parameter {} is expected to be of type {} but argument passed was of type {}",
new Object[]{method.getName(), instance, i, argType, args[i].getClass()});
} else {
logger.error("Unable to invoke method {} on {} because method parameter {} is expected to be of type {} but argument passed was of type {}",
new Object[]{method.getName(), instance, i, argType, args[i].getClass()});
}
return false;
}
}
try {
if (argumentTypes.length == args.length) {
method.invoke(instance, args);
} else {
final Object[] argsToPass = new Object[argumentTypes.length];
for (int i = 0; i < argsToPass.length; i++) {
argsToPass[i] = args[i];
}
method.invoke(instance, argsToPass);
}
} catch (final InvocationTargetException ite) {
if (logger == null) {
LOG.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, ite.getCause()});
LOG.error("", ite.getCause());
} else {
logger.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, ite.getCause()});
}
} catch (final IllegalAccessException | IllegalArgumentException t) {
if (logger == null) {
LOG.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, t});
LOG.error("", t);
} else {
logger.error("Unable to invoke method {} on {} due to {}", new Object[]{method.getName(), instance, t});
}
return false;
}
} finally {
if (!isAccessible) {
method.setAccessible(false);
}
return false;
}
}
}

Some files were not shown because too many files have changed in this diff Show More