mirror of https://github.com/apache/nifi.git

NIFI-12220 Added ability to create Controller Services from migrateProperties

- Added ability to get raw property values from PropertyConfiguration instead of just effective values
- Updated TestRunner to allow for testing these migration methods
- Auto-enable newly created controller services if they are valid
- Eliminated Proxy properties in all AWS processors and instead just make use of the Proxy Configuration controller service
- Eliminated authentication properties from AWS processors and migrated all processors to using a Controller Service for authentication

This closes #7874

Signed-off-by: David Handermann <exceptionfactory@apache.org>

parent 07b35e04b1
commit a44b633252
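For orientation before the API changes below, here is a minimal sketch of the pattern this commit enables: a Processor moving its own credential properties into a newly created Controller Service during migrateProperties. The sketch is not part of the commit; the service classname and property names are illustrative placeholders.

```java
import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;

import java.util.Map;

public class ExampleMigratingProcessor extends AbstractProcessor {

    @Override
    public void migrateProperties(final PropertyConfiguration config) {
        if (config.isPropertySet("Username") && config.isPropertySet("Password")) {
            // Use raw values so that Parameter references survive the move to the service.
            final Map<String, String> serviceProperties = Map.of(
                    "Username", config.getRawPropertyValue("Username").get(),
                    "Password", config.getRawPropertyValue("Password").get());

            // Hypothetical implementation classname; createControllerService expects the implementation class, not the interface.
            final String serviceId = config.createControllerService("org.example.auth.UsernamePasswordService", serviceProperties);
            config.setProperty("Authentication Service", serviceId);
        }

        // The obsolete properties are dropped regardless of whether a service was created.
        config.removeProperty("Username");
        config.removeProperty("Password");
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        // no-op for this sketch
    }
}
```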
@@ -128,10 +128,83 @@ public interface PropertyConfiguration {
        return getPropertyValue(descriptor.getName());
    }

    /**
     * Returns an optional value representing the "raw" value of the property with the given name. The "raw" value is
     * the value before any parameters are substituted.
     *
     * @param propertyName the name of the property
     * @return an empty optional if the value is null or unset, else an Optional representing the configured value
     */
    Optional<String> getRawPropertyValue(String propertyName);

    /**
     * Returns an optional value representing the "raw" value of the property identified by the given descriptor. The "raw" value is
     * the value before any parameters are substituted.
     *
     * @param descriptor the descriptor that identifies the property
     * @return an empty optional if the value is null or unset, else an Optional representing the configured value
     */
    default Optional<String> getRawPropertyValue(PropertyDescriptor descriptor) {
        return getRawPropertyValue(descriptor.getName());
    }

    /**
     * Returns a map containing all of the configured properties
     * @return a Map containing the names and values of all configured properties
     */
    Map<String, String> getProperties();

    /**
     * Returns a map containing all of the raw property values
     *
     * @return a Map containing the names and raw values of all configured properties
     */
    Map<String, String> getRawProperties();

    /**
     * <p>
     * Creates a new Controller Service of the given type and configures it with the given property values. Note that if a Controller Service
     * already exists within the same scope and with the same implementation and configuration, a new service may not be created and instead
     * the existing service may be used.
     * </p>
     *
     * <p>
     * This allows for properties that were previously defined in the extension to be moved to a Controller Service. For example,
     * consider a Processor that has "Username" and "Password" properties. In the next version of the Processor, we want to support
     * multiple types of authentication, and we delegate the authentication to a Controller Service. Consider that the Controller Service
     * implementation we wish to use has a classname of {@code org.apache.nifi.services.authentication.UsernamePassword}. We might then
     * use this method as such:
     * </p>
     *
     * <pre><code>
     * // Create a new Controller Service of type org.apache.nifi.services.authentication.UsernamePassword whose Username and Password
     * // properties match those currently configured for this Processor.
     * final Map<String, String> serviceProperties = Map.of("Username", propertyConfiguration.getRawPropertyValue("Username").get(),
     *      "Password", propertyConfiguration.getRawPropertyValue("Password").get());
     * final String serviceId = propertyConfiguration.createControllerService("org.apache.nifi.services.authentication.UsernamePassword", serviceProperties);
     *
     * // Set our Authentication Service property to point to this new service.
     * propertyConfiguration.setProperty(AUTHENTICATION_SERVICE, serviceId);
     *
     * // Remove the Username and Password properties from this Processor, since we are now going to use the Authentication Service.
     * propertyConfiguration.removeProperty("Username");
     * propertyConfiguration.removeProperty("Password");
     * </code></pre>
     *
     * <p>
     * Note the use of {@link #getRawPropertyValue(String)} here instead of {@link #getPropertyValue(String)}. Because we want to set
     * the new Controller Service's value to the same value as is currently configured for the Processor's "Username" and "Password" properties,
     * we use {@link #getRawPropertyValue(String)}. This ensures that if the Processor is configured using Parameters, those Parameter
     * references are still held by the Controller Service.
     * </p>
     *
     * <p>
     * Also note that this method expects the classname of the implementation, not the classname of the interface.
     * </p>
     *
     * @param implementationClassName the fully qualified classname of the Controller Service implementation
     * @param serviceProperties the property values to configure the newly created Controller Service with
     * @return an identifier for the Controller Service
     */
    String createControllerService(String implementationClassName, Map<String, String> serviceProperties);
}
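To make the distinction between effective and raw values concrete, here is a minimal illustration (not part of the commit); the property name, parameter name, and resolved value are invented for the example.

```java
import org.apache.nifi.migration.PropertyConfiguration;

import java.util.Optional;

class RawValueIllustration {

    // Assumes "Password" is configured as "#{db.password}" and that Parameter resolves to "s3cr3t".
    static void show(final PropertyConfiguration config) {
        // Effective value: parameter references are substituted -> Optional["s3cr3t"]
        final Optional<String> effective = config.getPropertyValue("Password");

        // Raw value: the reference itself is preserved -> Optional["#{db.password}"], so a
        // Controller Service created during migration keeps pointing at the same Parameter.
        final Optional<String> raw = config.getRawPropertyValue("Password");
    }
}
```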
@@ -0,0 +1,138 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.nifi.util;

import org.apache.nifi.migration.PropertyConfiguration;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;

public class MockPropertyConfiguration implements PropertyConfiguration {
    private final Map<String, String> propertyRenames = new HashMap<>();
    private final Set<String> propertiesRemoved = new HashSet<>();
    private final Set<String> propertiesUpdated = new HashSet<>();
    private final Map<String, String> rawProperties;
    private final Set<CreatedControllerService> createdControllerServices = new HashSet<>();

    public MockPropertyConfiguration(final Map<String, String> propertyValues) {
        this.rawProperties = new HashMap<>(propertyValues);
    }

    public PropertyMigrationResult toPropertyMigrationResult() {
        return new PropertyMigrationResult() {

            @Override
            public Set<String> getPropertiesRemoved() {
                return Collections.unmodifiableSet(propertiesRemoved);
            }

            @Override
            public Map<String, String> getPropertiesRenamed() {
                return Collections.unmodifiableMap(propertyRenames);
            }

            @Override
            public Set<CreatedControllerService> getCreatedControllerServices() {
                return Collections.unmodifiableSet(createdControllerServices);
            }

            @Override
            public Set<String> getPropertiesUpdated() {
                return Collections.unmodifiableSet(propertiesUpdated);
            }
        };
    }

    @Override
    public boolean renameProperty(final String propertyName, final String newName) {
        propertyRenames.put(propertyName, newName);

        final boolean hasProperty = hasProperty(propertyName);
        if (!hasProperty) {
            return false;
        }

        final String value = rawProperties.remove(propertyName);
        rawProperties.put(newName, value);
        return true;
    }

    @Override
    public boolean removeProperty(final String propertyName) {
        propertiesRemoved.add(propertyName);

        if (!hasProperty(propertyName)) {
            return false;
        }

        rawProperties.remove(propertyName);
        return true;
    }

    @Override
    public boolean hasProperty(final String propertyName) {
        return rawProperties.containsKey(propertyName);
    }

    @Override
    public boolean isPropertySet(final String propertyName) {
        return rawProperties.get(propertyName) != null;
    }

    @Override
    public void setProperty(final String propertyName, final String propertyValue) {
        propertiesUpdated.add(propertyName);
        rawProperties.put(propertyName, propertyValue);
    }

    @Override
    public Optional<String> getPropertyValue(final String propertyName) {
        return getRawPropertyValue(propertyName);
    }

    @Override
    public Optional<String> getRawPropertyValue(final String propertyName) {
        return Optional.ofNullable(rawProperties.get(propertyName));
    }

    @Override
    public Map<String, String> getProperties() {
        return getRawProperties();
    }

    @Override
    public Map<String, String> getRawProperties() {
        return Collections.unmodifiableMap(rawProperties);
    }

    @Override
    public String createControllerService(final String implementationClassName, final Map<String, String> serviceProperties) {
        final String serviceId = UUID.randomUUID().toString();
        createdControllerServices.add(new CreatedControllerService(serviceId, implementationClassName, serviceProperties));
        return serviceId;
    }

    public record CreatedControllerService(String id, String implementationClassName, Map<String, String> serviceProperties) {
    }
}
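A quick sketch of how this mock might be exercised directly in a unit test, without going through a TestRunner; the seed property names and values are placeholders, and the component under test is passed in as any Processor that overrides migrateProperties.

```java
import org.apache.nifi.processor.Processor;
import org.apache.nifi.util.MockPropertyConfiguration;
import org.apache.nifi.util.PropertyMigrationResult;

import java.util.Map;

class MigrationMockSketch {

    // Drives migration against the mock and hands back the captured result for assertions.
    static PropertyMigrationResult run(final Processor processor) {
        // Seed the mock with the "old" property layout; values are raw, so parameter references are kept.
        final MockPropertyConfiguration config = new MockPropertyConfiguration(Map.of(
                "Username", "admin",
                "Password", "#{db.password}"));

        processor.migrateProperties(config);
        return config.toPropertyMigrationResult();
    }
}
```

The returned result can then be inspected for removed, renamed, and updated properties, as well as any Controller Services the migration created.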
@@ -0,0 +1,47 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.nifi.util;

import org.apache.nifi.components.PropertyDescriptor;

import java.util.Map;
import java.util.Set;

public interface PropertyMigrationResult {

    /**
     * @return a set containing the names of all properties that were removed
     */
    Set<String> getPropertiesRemoved();

    /**
     * @return a mapping of previous property names to the new names of those properties
     */
    Map<String, String> getPropertiesRenamed();

    /**
     * @return a set of all controller services that were added
     */
    Set<MockPropertyConfiguration.CreatedControllerService> getCreatedControllerServices();

    /**
     * @return a set of all properties whose values were updated via calls to {@link org.apache.nifi.migration.PropertyConfiguration#setProperty(String, String)} or
     * {@link org.apache.nifi.migration.PropertyConfiguration#setProperty(PropertyDescriptor, String)}.
     */
    Set<String> getPropertiesUpdated();
}
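Whether the result comes from MockPropertyConfiguration.toPropertyMigrationResult() or from TestRunner.migrateProperties(), assertions against it look the same. A brief, purely illustrative sketch; the expected classname and values are hypothetical:

```java
import org.apache.nifi.util.MockPropertyConfiguration.CreatedControllerService;
import org.apache.nifi.util.PropertyMigrationResult;

import static org.junit.jupiter.api.Assertions.assertEquals;

class MigrationAssertionsSketch {

    static void verify(final PropertyMigrationResult result) {
        // Exactly one service is expected from the migration under test.
        assertEquals(1, result.getCreatedControllerServices().size());

        final CreatedControllerService service = result.getCreatedControllerServices().iterator().next();
        // Both the implementation classname and the properties passed to createControllerService are captured.
        assertEquals("org.example.auth.UsernamePasswordService", service.implementationClassName()); // hypothetical classname
        assertEquals("admin", service.serviceProperties().get("Username"));                          // hypothetical value
    }
}
```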
@@ -1066,4 +1066,49 @@ public class StandardProcessorTestRunner implements TestRunner {
            .collect(toSet());
        assertEquals(expectedEventTypes, actualEventTypes);
    }

    @Override
    public PropertyMigrationResult migrateProperties() {
        final MockPropertyConfiguration mockPropertyConfiguration = new MockPropertyConfiguration(getProcessContext().getAllProperties());
        getProcessor().migrateProperties(mockPropertyConfiguration);

        final PropertyMigrationResult migrationResult = mockPropertyConfiguration.toPropertyMigrationResult();
        final Set<MockPropertyConfiguration.CreatedControllerService> services = migrationResult.getCreatedControllerServices();

        RuntimeException serviceCreationException = null;
        for (final MockPropertyConfiguration.CreatedControllerService service : services) {
            final ControllerService serviceImpl;
            try {
                final Class<?> clazz = Class.forName(service.implementationClassName());
                final Object newInstance = clazz.getDeclaredConstructor().newInstance();
                if (!(newInstance instanceof ControllerService)) {
                    throw new RuntimeException(clazz + " is not a Controller Service");
                }

                serviceImpl = (ControllerService) newInstance;
                addControllerService(service.id(), serviceImpl, service.serviceProperties());
            } catch (final Exception e) {
                if (serviceCreationException == null) {
                    if (e instanceof RuntimeException) {
                        serviceCreationException = (RuntimeException) e;
                    } else {
                        serviceCreationException = new RuntimeException(e);
                    }
                } else {
                    serviceCreationException.addSuppressed(e);
                }
            }
        }

        if (serviceCreationException != null) {
            throw serviceCreationException;
        }

        final Map<String, String> updatedProperties = mockPropertyConfiguration.getRawProperties();
        final MockProcessContext processContext = getProcessContext();
        processContext.clearProperties();
        updatedProperties.forEach(processContext::setProperty);

        return migrationResult;
    }
}
@@ -22,6 +22,7 @@ import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.controller.queue.QueueSize;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessSessionFactory;

@@ -1064,4 +1065,13 @@ public interface TestRunner {
     * @param eventType Provenance event type
     */
    void assertProvenanceEvent(ProvenanceEventType eventType);

    /**
     * Causes the TestRunner to call the Processor's {@link Processor#migrateProperties(PropertyConfiguration)} method. The effects that are
     * caused by calling the method are applied, as they would be in a running NiFi instance. Unlike in a running NiFi instance, though, the
     * operations that were performed are captured so that they can be examined and assertions made about the migration that occurred.
     *
     * @return the results of migrating properties
     */
    PropertyMigrationResult migrateProperties();
}
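A hedged sketch of how a test might drive this new TestRunner method end to end, using the obsolete AWS proxy property names that appear later in this diff. SomeAwsProcessor is a placeholder for any concrete processor extending the AWS base classes changed below; it is not a real class in this commit.

```java
import org.apache.nifi.util.PropertyMigrationResult;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

class MigrateAwsProxyPropertiesSketch {

    void migratesProxyPropertiesToService() {
        // Placeholder processor class; substitute a concrete AWS processor from the bundle.
        final TestRunner runner = TestRunners.newTestRunner(SomeAwsProcessor.class);
        runner.setProperty("Proxy Host", "proxy.example.com");
        runner.setProperty("Proxy Host Port", "8080");

        final PropertyMigrationResult result = runner.migrateProperties();

        // The obsolete proxy properties are removed and a Proxy Configuration Service is created in
        // their place. Note that a credentials service may also be created by the default-credentials
        // fallback shown later in this diff, so the assertions avoid an exact service count.
        assertTrue(result.getPropertiesRemoved().contains("Proxy Host"));
        assertTrue(result.getPropertiesRemoved().contains("Proxy Host Port"));
        assertFalse(result.getCreatedControllerServices().isEmpty());
    }
}
```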
@@ -19,12 +19,7 @@ package org.apache.nifi.processors.aws;
import com.amazonaws.AmazonWebServiceClient;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.http.conn.ssl.SdkTLSSocketFactory;
import com.amazonaws.regions.Region;

@@ -39,30 +34,25 @@ import org.apache.nifi.components.ConfigVerificationResult;
import org.apache.nifi.components.ConfigVerificationResult.Outcome;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.context.PropertyContext;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.VerifiableProcessor;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderService;
import org.apache.nifi.proxy.ProxyConfiguration;
import org.apache.nifi.proxy.ProxyConfigurationService;
import org.apache.nifi.proxy.ProxySpec;
import org.apache.nifi.ssl.SSLContextService;

import javax.net.ssl.SSLContext;
import java.io.File;
import java.io.IOException;
import java.net.Proxy;
import java.net.Proxy.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

@@ -75,49 +65,33 @@ import java.util.concurrent.TimeUnit;
 *
 * @see <a href="http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/AWSCredentialsProvider.html">AWSCredentialsProvider</a>
 */
public abstract class AbstractAWSCredentialsProviderProcessor<ClientType extends AmazonWebServiceClient> extends AbstractProcessor
        implements VerifiableProcessor {
public abstract class AbstractAWSCredentialsProviderProcessor<ClientType extends AmazonWebServiceClient> extends AbstractProcessor implements VerifiableProcessor {

    private static final String CREDENTIALS_SERVICE_CLASSNAME = "org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService";
    private static final String PROXY_SERVICE_CLASSNAME = "org.apache.nifi.proxy.StandardProxyConfigurationService";

    // Obsolete property names
    private static final String OBSOLETE_ACCESS_KEY = "Access Key";
    private static final String OBSOLETE_SECRET_KEY = "Secret Key";
    private static final String OBSOLETE_CREDENTIALS_FILE = "Credentials File";
    private static final String OBSOLETE_PROXY_HOST = "Proxy Host";
    private static final String OBSOLETE_PROXY_PORT = "Proxy Host Port";
    private static final String OBSOLETE_PROXY_USERNAME = "proxy-user-name";
    private static final String OBSOLETE_PROXY_PASSWORD = "proxy-user-password";

    // Controller Service property names
    private static final String AUTH_SERVICE_ACCESS_KEY = "Access Key";
    private static final String AUTH_SERVICE_SECRET_KEY = "Secret Key";
    private static final String AUTH_SERVICE_CREDENTIALS_FILE = "Credentials File";
    private static final String AUTH_SERVICE_DEFAULT_CREDENTIALS = "default-credentials";
    private static final String PROXY_SERVICE_HOST = "proxy-server-host";
    private static final String PROXY_SERVICE_PORT = "proxy-server-port";
    private static final String PROXY_SERVICE_USERNAME = "proxy-user-name";
    private static final String PROXY_SERVICE_PASSWORD = "proxy-user-password";
    private static final String PROXY_SERVICE_TYPE = "proxy-type";


    // Property Descriptors
    public static final PropertyDescriptor CREDENTIALS_FILE = CredentialPropertyDescriptors.CREDENTIALS_FILE;
    public static final PropertyDescriptor ACCESS_KEY = CredentialPropertyDescriptors.ACCESS_KEY_ID;
    public static final PropertyDescriptor SECRET_KEY = CredentialPropertyDescriptors.SECRET_KEY;

    public static final PropertyDescriptor PROXY_HOST = new PropertyDescriptor.Builder()
            .name("Proxy Host")
            .description("Proxy host name or IP")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PROXY_HOST_PORT = new PropertyDescriptor.Builder()
            .name("Proxy Host Port")
            .description("Proxy host port")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.PORT_VALIDATOR)
            .build();

    public static final PropertyDescriptor PROXY_USERNAME = new PropertyDescriptor.Builder()
            .name("proxy-user-name")
            .displayName("Proxy Username")
            .description("Proxy username")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PROXY_PASSWORD = new PropertyDescriptor.Builder()
            .name("proxy-user-password")
            .displayName("Proxy Password")
            .description("Proxy password")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(true)
            .build();

    public static final PropertyDescriptor REGION = new PropertyDescriptor.Builder()
            .name("Region")
            .description("The AWS Region to connect to.")

@@ -152,12 +126,20 @@ public abstract class AbstractAWSCredentialsProviderProcessor<ClientType extends
            .build();

    public static final PropertyDescriptor AWS_CREDENTIALS_PROVIDER_SERVICE = new PropertyDescriptor.Builder()
            .name("AWS Credentials Provider service")
            .displayName("AWS Credentials Provider Service")
            .description("The Controller Service that is used to obtain AWS credentials provider")
            .required(false)
            .identifiesControllerService(AWSCredentialsProviderService.class)
            .build();
            .name("AWS Credentials Provider service")
            .displayName("AWS Credentials Provider Service")
            .description("The Controller Service that is used to obtain AWS credentials provider")
            .required(true)
            .identifiesControllerService(AWSCredentialsProviderService.class)
            .build();

    public static final PropertyDescriptor PROXY_CONFIGURATION_SERVICE = new PropertyDescriptor.Builder()
            .name("proxy-configuration-service")
            .displayName("Proxy Configuration Service")
            .description("Specifies the Proxy Configuration Controller Service to proxy network requests.")
            .identifiesControllerService(ProxyConfigurationService.class)
            .required(false)
            .build();


    // Relationships

@@ -173,11 +155,6 @@ public abstract class AbstractAWSCredentialsProviderProcessor<ClientType extends
    public static final Set<Relationship> relationships = Set.of(REL_SUCCESS, REL_FAILURE);


    // Constants
    private static final ProxySpec[] PROXY_SPECS = {ProxySpec.HTTP_AUTH};
    public static final PropertyDescriptor PROXY_CONFIGURATION_SERVICE = ProxyConfiguration.createProxyConfigPropertyDescriptor(true, PROXY_SPECS);


    // Member variables
    private final Cache<String, ClientType> clientCache = Caffeine.newBuilder()
            .maximumSize(10)

@@ -200,7 +177,6 @@ public abstract class AbstractAWSCredentialsProviderProcessor<ClientType extends
        this.clientCache.cleanUp();
    }


    public static AllowableValue createAllowableValue(final Regions region) {
        return new AllowableValue(region.getName(), region.getDescription(), "AWS Region Code : " + region.getName());
    }

@@ -213,72 +189,56 @@ public abstract class AbstractAWSCredentialsProviderProcessor<ClientType extends
        return values.toArray(new AllowableValue[0]);
    }


    @Override
    protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
        final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(validationContext));

        final boolean accessKeySet = validationContext.getProperty(ACCESS_KEY).isSet();
        final boolean secretKeySet = validationContext.getProperty(SECRET_KEY).isSet();
        if ((accessKeySet && !secretKeySet) || (secretKeySet && !accessKeySet)) {
            validationResults.add(new ValidationResult.Builder().input("Access Key").valid(false).explanation("If setting Secret Key or Access Key, must set both").build());
        }

        final boolean credentialsFileSet = validationContext.getProperty(CREDENTIALS_FILE).isSet();
        if ((secretKeySet || accessKeySet) && credentialsFileSet) {
            validationResults.add(new ValidationResult.Builder().input("Access Key").valid(false).explanation("Cannot set both Credentials File and Secret Key/Access Key").build());
        }

        final boolean proxyHostSet = validationContext.getProperty(PROXY_HOST).isSet();
        final boolean proxyPortSet = validationContext.getProperty(PROXY_HOST_PORT).isSet();
        final boolean proxyConfigServiceSet = validationContext.getProperty(ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE).isSet();

        if ((proxyHostSet && !proxyPortSet) || (!proxyHostSet && proxyPortSet)) {
            validationResults.add(new ValidationResult.Builder().subject("Proxy Host and Port").valid(false).explanation("If Proxy Host or Proxy Port is set, both must be set").build());
        }

        final boolean proxyUserSet = validationContext.getProperty(PROXY_USERNAME).isSet();
        final boolean proxyPwdSet = validationContext.getProperty(PROXY_PASSWORD).isSet();

        if ((proxyUserSet && !proxyPwdSet) || (!proxyUserSet && proxyPwdSet)) {
            validationResults.add(new ValidationResult.Builder().subject("Proxy User and Password").valid(false).explanation("If Proxy Username or Proxy Password is set, both must be set").build());
        }

        if (proxyUserSet && !proxyHostSet) {
            validationResults.add(new ValidationResult.Builder().subject("Proxy").valid(false).explanation("If Proxy Username or Proxy Password is set, Proxy Host must also be set").build());
        }

        ProxyConfiguration.validateProxySpec(validationContext, validationResults, PROXY_SPECS);

        if (proxyHostSet && proxyConfigServiceSet) {
            validationResults.add(new ValidationResult.Builder().subject("Proxy Configuration Service").valid(false)
                    .explanation("Either Proxy Username and Proxy Password must be set or Proxy Configuration Service but not both").build());
        }

        return validationResults;
    public void migrateProperties(final PropertyConfiguration config) {
        migrateAuthenticationProperties(config);
        migrateProxyProperties(config);
    }

    private void migrateAuthenticationProperties(final PropertyConfiguration config) {
        if (config.isPropertySet(OBSOLETE_ACCESS_KEY) && config.isPropertySet(OBSOLETE_SECRET_KEY)) {
            final String serviceId = config.createControllerService(CREDENTIALS_SERVICE_CLASSNAME, Map.of(
                AUTH_SERVICE_ACCESS_KEY, config.getRawPropertyValue(OBSOLETE_ACCESS_KEY).get(),
                AUTH_SERVICE_SECRET_KEY, config.getRawPropertyValue(OBSOLETE_SECRET_KEY).get()));

    protected AWSCredentials getCredentials(final PropertyContext context) {
        final String accessKey = context.getProperty(ACCESS_KEY).evaluateAttributeExpressions().getValue();
        final String secretKey = context.getProperty(SECRET_KEY).evaluateAttributeExpressions().getValue();
            config.setProperty(AWS_CREDENTIALS_PROVIDER_SERVICE.getName(), serviceId);
        } else if (config.isPropertySet(OBSOLETE_CREDENTIALS_FILE)) {
            final String serviceId = config.createControllerService(CREDENTIALS_SERVICE_CLASSNAME, Map.of(
                AUTH_SERVICE_CREDENTIALS_FILE, config.getRawPropertyValue(OBSOLETE_CREDENTIALS_FILE).get()));

        final String credentialsFile = context.getProperty(CREDENTIALS_FILE).getValue();

        if (credentialsFile != null) {
            try {
                return new PropertiesCredentials(new File(credentialsFile));
            } catch (final IOException ioe) {
                throw new ProcessException("Could not read Credentials File", ioe);
            }
            config.setProperty(AWS_CREDENTIALS_PROVIDER_SERVICE, serviceId);
        } else if (!config.isPropertySet(AWS_CREDENTIALS_PROVIDER_SERVICE)) {
            final String serviceId = config.createControllerService(CREDENTIALS_SERVICE_CLASSNAME, Map.of(
                AUTH_SERVICE_DEFAULT_CREDENTIALS, "true"));
            config.setProperty(AWS_CREDENTIALS_PROVIDER_SERVICE, serviceId);
        }

        if (accessKey != null && secretKey != null) {
            return new BasicAWSCredentials(accessKey, secretKey);
        }

        return new AnonymousAWSCredentials();
        config.removeProperty(OBSOLETE_ACCESS_KEY);
        config.removeProperty(OBSOLETE_SECRET_KEY);
        config.removeProperty(OBSOLETE_CREDENTIALS_FILE);
    }

    private void migrateProxyProperties(final PropertyConfiguration config) {
        if (config.isPropertySet(OBSOLETE_PROXY_HOST)) {
            final Map<String, String> proxyProperties = new HashMap<>();
            proxyProperties.put(PROXY_SERVICE_TYPE, Type.HTTP.name());
            proxyProperties.put(PROXY_SERVICE_HOST, config.getRawPropertyValue(OBSOLETE_PROXY_HOST).get());

            // Map any optional proxy configs
            config.getRawPropertyValue(OBSOLETE_PROXY_PORT).ifPresent(value -> proxyProperties.put(PROXY_SERVICE_PORT, value));
            config.getRawPropertyValue(OBSOLETE_PROXY_USERNAME).ifPresent(value -> proxyProperties.put(PROXY_SERVICE_USERNAME, value));
            config.getRawPropertyValue(OBSOLETE_PROXY_PASSWORD).ifPresent(value -> proxyProperties.put(PROXY_SERVICE_PASSWORD, value));

            final String serviceId = config.createControllerService(PROXY_SERVICE_CLASSNAME, proxyProperties);
            config.setProperty(PROXY_CONFIGURATION_SERVICE, serviceId);
        }

        config.removeProperty(OBSOLETE_PROXY_HOST);
        config.removeProperty(OBSOLETE_PROXY_PORT);
        config.removeProperty(OBSOLETE_PROXY_USERNAME);
        config.removeProperty(OBSOLETE_PROXY_PASSWORD);
    }

    protected ClientConfiguration createConfiguration(final ProcessContext context) {
        return createConfiguration(context, context.getMaxConcurrentTasks());

@@ -306,22 +266,11 @@ public abstract class AbstractAWSCredentialsProviderProcessor<ClientType extends
    }

        final ProxyConfiguration proxyConfig = ProxyConfiguration.getConfiguration(context, () -> {
            if (context.getProperty(PROXY_HOST).isSet()) {
                final ProxyConfiguration componentProxyConfig = new ProxyConfiguration();
                String proxyHost = context.getProperty(PROXY_HOST).evaluateAttributeExpressions().getValue();
                Integer proxyPort = context.getProperty(PROXY_HOST_PORT).evaluateAttributeExpressions().asInteger();
                String proxyUsername = context.getProperty(PROXY_USERNAME).evaluateAttributeExpressions().getValue();
                String proxyPassword = context.getProperty(PROXY_PASSWORD).evaluateAttributeExpressions().getValue();
                componentProxyConfig.setProxyType(Proxy.Type.HTTP);
                componentProxyConfig.setProxyServerHost(proxyHost);
                componentProxyConfig.setProxyServerPort(proxyPort);
                componentProxyConfig.setProxyUserName(proxyUsername);
                componentProxyConfig.setProxyUserPassword(proxyPassword);
                return componentProxyConfig;
            } else if (context.getProperty(ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE).isSet()) {
                final ProxyConfigurationService configurationService = context.getProperty(ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE).asControllerService(ProxyConfigurationService.class);
            if (context.getProperty(PROXY_CONFIGURATION_SERVICE).isSet()) {
                final ProxyConfigurationService configurationService = context.getProperty(PROXY_CONFIGURATION_SERVICE).asControllerService(ProxyConfigurationService.class);
                return configurationService.getConfiguration();
            }

            return ProxyConfiguration.DIRECT_CONFIGURATION;
        });


@@ -392,21 +341,8 @@ public abstract class AbstractAWSCredentialsProviderProcessor<ClientType extends
        return results;
    }

    /**
     * Get credentials provider using the {@link AWSCredentialsProviderService}
     * @param context the process context
     * @return AWSCredentialsProvider the credential provider
     * @see <a href="http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/AWSCredentialsProvider.html">AWSCredentialsProvider</a>
     */
    protected AWSCredentialsProvider getCredentialsProvider(final ProcessContext context) {
        final AWSCredentialsProviderService awsCredentialsProviderService =
                context.getProperty(AWS_CREDENTIALS_PROVIDER_SERVICE).asControllerService(AWSCredentialsProviderService.class);

        if (awsCredentialsProviderService == null) {
            final AWSCredentials credentials = getCredentials(context);
            return new AWSStaticCredentialsProvider(credentials);
        }

        final AWSCredentialsProviderService awsCredentialsProviderService = context.getProperty(AWS_CREDENTIALS_PROVIDER_SERVICE).asControllerService(AWSCredentialsProviderService.class);
        return awsCredentialsProviderService.getCredentialsProvider();
    }
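A hedged sketch of how the credential migration above might be verified against the mock configuration; the processor argument stands in for any concrete subclass of AbstractAWSCredentialsProviderProcessor, and the key values are invented.

```java
import org.apache.nifi.processor.Processor;
import org.apache.nifi.util.MockPropertyConfiguration;
import org.apache.nifi.util.MockPropertyConfiguration.CreatedControllerService;
import org.apache.nifi.util.PropertyMigrationResult;

import java.util.Map;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

class AwsCredentialMigrationSketch {

    static void verifyAccessKeyMigration(final Processor processor) {
        final MockPropertyConfiguration config = new MockPropertyConfiguration(Map.of(
                "Access Key", "AKIA_EXAMPLE",
                "Secret Key", "#{aws.secret.key}"));

        processor.migrateProperties(config);
        final PropertyMigrationResult result = config.toPropertyMigrationResult();

        // The obsolete credential properties are removed...
        assertTrue(result.getPropertiesRemoved().contains("Access Key"));
        assertTrue(result.getPropertiesRemoved().contains("Secret Key"));

        // ...and a credentials Controller Service is created with the raw values, so the
        // Parameter reference for the secret key is preserved.
        final CreatedControllerService service = result.getCreatedControllerServices().iterator().next();
        assertEquals("org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService",
                service.implementationClassName());
        assertEquals("#{aws.secret.key}", service.serviceProperties().get("Secret Key"));
    }
}
```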
@@ -1,283 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.aws.credentials.provider.factory;

import com.amazonaws.auth.Signer;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.resource.ResourceCardinality;
import org.apache.nifi.components.resource.ResourceType;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.ssl.SSLContextService;
import software.amazon.awssdk.regions.Region;

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;

import static org.apache.nifi.processors.aws.signer.AwsSignerType.AWS_V4_SIGNER;
import static org.apache.nifi.processors.aws.signer.AwsSignerType.CUSTOM_SIGNER;
import static org.apache.nifi.processors.aws.signer.AwsSignerType.DEFAULT_SIGNER;

/**
 * Shared definitions of properties that specify various AWS credentials.
 *
 * @see <a href="http://docs.aws.amazon.com/AWSSdkDocsJava/latest/DeveloperGuide/credentials.html">
 *     Providing AWS Credentials in the AWS SDK for Java</a>
 */
public class CredentialPropertyDescriptors {

    /**
     * Specifies use of the Default Credential Provider Chain
     *
     * @see <a href="http://docs.aws.amazon.com/AWSSdkDocsJava/latest/DeveloperGuide/credentials.html#id1">
     *     AWS SDK: Default Credential Provider Chain
     *     </a>
     */
    public static final PropertyDescriptor USE_DEFAULT_CREDENTIALS = new PropertyDescriptor.Builder()
            .name("default-credentials")
            .displayName("Use Default Credentials")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(false)
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .sensitive(false)
            .allowableValues("true", "false")
            .defaultValue("false")
            .description("If true, uses the Default Credential chain, including EC2 instance profiles or roles, " +
                    "environment variables, default user credentials, etc.")
            .build();

    public static final PropertyDescriptor CREDENTIALS_FILE = new PropertyDescriptor.Builder()
            .name("Credentials File")
            .displayName("Credentials File")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(false)
            .identifiesExternalResource(ResourceCardinality.SINGLE, ResourceType.FILE)
            .description("Path to a file containing AWS access key and secret key in properties file format.")
            .build();

    public static final PropertyDescriptor ACCESS_KEY_ID = new PropertyDescriptor.Builder()
            .name("Access Key")
            .displayName("Access Key ID")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(true)
            .build();

    public static final PropertyDescriptor SECRET_KEY = new PropertyDescriptor.Builder()
            .name("Secret Key")
            .displayName("Secret Access Key")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(true)
            .build();

    /**
     * Specifies use of a named profile credential.
     *
     * @see <a href="http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/profile/ProfileCredentialsProvider.html">
     *     ProfileCredentialsProvider</a>
     */
    public static final PropertyDescriptor PROFILE_NAME = new PropertyDescriptor.Builder()
            .name("profile-name")
            .displayName("Profile Name")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(false)
            .description("The AWS profile name for credentials from the profile configuration file.")
            .build();

    public static final PropertyDescriptor USE_ANONYMOUS_CREDENTIALS = new PropertyDescriptor.Builder()
            .name("anonymous-credentials")
            .displayName("Use Anonymous Credentials")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(false)
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .sensitive(false)
            .allowableValues("true", "false")
            .defaultValue("false")
            .description("If true, uses Anonymous credentials")
            .build();

    /**
     * AWS Role Arn used for cross account access
     *
     * @see <a href="http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html#genref-arns">AWS ARN</a>
     */
    public static final PropertyDescriptor ASSUME_ROLE_ARN = new PropertyDescriptor.Builder()
            .name("Assume Role ARN")
            .displayName("Assume Role ARN")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(false)
            .description("The AWS Role ARN for cross account access. This is used in conjunction with Assume Role Session Name and other Assume Role properties.")
            .build();

    /**
     * The role name while creating aws role
     */
    public static final PropertyDescriptor ASSUME_ROLE_NAME = new PropertyDescriptor.Builder()
            .name("Assume Role Session Name")
            .displayName("Assume Role Session Name")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(false)
            .description("The AWS Role Session Name for cross account access. This is used in conjunction with Assume Role ARN.")
            .dependsOn(ASSUME_ROLE_ARN)
            .build();

    /**
     * Max session time for role based credentials. The range is between 900 and 3600 seconds.
     */
    public static final PropertyDescriptor MAX_SESSION_TIME = new PropertyDescriptor.Builder()
            .name("Session Time")
            .displayName("Assume Role Session Time")
            .description("Session time for role based session (between 900 and 3600 seconds). This is used in conjunction with Assume Role ARN.")
            .defaultValue("3600")
            .required(false)
            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
            .sensitive(false)
            .dependsOn(ASSUME_ROLE_ARN)
            .build();

    /**
     * The ExternalId used while creating aws role.
     */
    public static final PropertyDescriptor ASSUME_ROLE_EXTERNAL_ID = new PropertyDescriptor.Builder()
            .name("assume-role-external-id")
            .displayName("Assume Role External ID")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(false)
            .description("External ID for cross-account access. This is used in conjunction with Assume Role ARN.")
            .dependsOn(ASSUME_ROLE_ARN)
            .build();

    public static final PropertyDescriptor ASSUME_ROLE_SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
            .name("assume-role-ssl-context-service")
            .displayName("Assume Role SSL Context Service")
            .description("SSL Context Service used when connecting to the STS Endpoint.")
            .identifiesControllerService(SSLContextService.class)
            .required(false)
            .dependsOn(ASSUME_ROLE_ARN)
            .build();

    /**
     * Assume Role Proxy variables for configuring proxy to retrieve keys
     */
    public static final PropertyDescriptor ASSUME_ROLE_PROXY_HOST = new PropertyDescriptor.Builder()
            .name("assume-role-proxy-host")
            .displayName("Assume Role Proxy Host")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(false)
            .description("Proxy host for cross-account access, if needed within your environment. This will configure a proxy to request for temporary access keys into another AWS account.")
            .dependsOn(ASSUME_ROLE_ARN)
            .build();

    public static final PropertyDescriptor ASSUME_ROLE_PROXY_PORT = new PropertyDescriptor.Builder()
            .name("assume-role-proxy-port")
            .displayName("Assume Role Proxy Port")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(false)
            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
            .sensitive(false)
            .description("Proxy port for cross-account access, if needed within your environment. This will configure a proxy to request for temporary access keys into another AWS account.")
            .dependsOn(ASSUME_ROLE_ARN)
            .build();

    public static final PropertyDescriptor ASSUME_ROLE_STS_ENDPOINT = new PropertyDescriptor.Builder()
            .name("assume-role-sts-endpoint")
            .displayName("Assume Role STS Endpoint Override")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(false)
            .description("The default AWS Security Token Service (STS) endpoint (\"sts.amazonaws.com\") works for " +
                    "all accounts that are not for China (Beijing) region or GovCloud. You only need to set " +
                    "this property to \"sts.cn-north-1.amazonaws.com.cn\" when you are requesting session credentials " +
                    "for services in China(Beijing) region or to \"sts.us-gov-west-1.amazonaws.com\" for GovCloud.")
            .dependsOn(ASSUME_ROLE_ARN)
            .build();

    public static final PropertyDescriptor ASSUME_ROLE_STS_REGION = new PropertyDescriptor.Builder()
            .name("assume-role-sts-region")
            .displayName("Assume Role STS Region")
            .description("The AWS Security Token Service (STS) region")
            .dependsOn(ASSUME_ROLE_ARN)
            .allowableValues(getAvailableRegions())
            .defaultValue(createAllowableValue(Region.US_WEST_2).getValue())
            .build();

    public static final PropertyDescriptor ASSUME_ROLE_STS_SIGNER_OVERRIDE = new PropertyDescriptor.Builder()
            .name("assume-role-sts-signer-override")
            .displayName("Assume Role STS Signer Override")
            .description("The AWS STS library uses Signature Version 4 by default. This property allows you to plug in your own custom signer implementation.")
            .required(false)
            .allowableValues(EnumSet.of(
                    DEFAULT_SIGNER,
                    AWS_V4_SIGNER,
                    CUSTOM_SIGNER))
            .defaultValue(DEFAULT_SIGNER.getValue())
            .dependsOn(ASSUME_ROLE_ARN)
            .build();

    public static final PropertyDescriptor ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME = new PropertyDescriptor.Builder()
            .name("custom-signer-class-name")
            .displayName("Custom Signer Class Name")
            .description(String.format("Fully qualified class name of the custom signer class. The signer must implement %s interface.", Signer.class.getName()))
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .dependsOn(ASSUME_ROLE_STS_SIGNER_OVERRIDE, CUSTOM_SIGNER)
            .build();

    public static final PropertyDescriptor ASSUME_ROLE_STS_CUSTOM_SIGNER_MODULE_LOCATION = new PropertyDescriptor.Builder()
            .name("custom-signer-module-location")
            .displayName("Custom Signer Module Location")
            .description("Comma-separated list of paths to files and/or directories which contain the custom signer's JAR file and its dependencies (if any).")
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .identifiesExternalResource(ResourceCardinality.MULTIPLE, ResourceType.FILE, ResourceType.DIRECTORY)
            .dependsOn(ASSUME_ROLE_STS_SIGNER_OVERRIDE, CUSTOM_SIGNER)
            .dynamicallyModifiesClasspath(true)
            .build();

    public static AllowableValue createAllowableValue(final Region region) {
        return new AllowableValue(region.id(), region.metadata().description(), "AWS Region Code : " + region.id());
    }

    public static AllowableValue[] getAvailableRegions() {
        final List<AllowableValue> values = new ArrayList<>();
        for (final Region region : Region.regions()) {
            if (region.isGlobalRegion()) {
                continue;
            }
            values.add(createAllowableValue(region));
        }
        return values.toArray(new AllowableValue[0]);
    }
}
@@ -24,11 +24,9 @@ import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.ConfigVerificationResult;
import org.apache.nifi.components.ConfigVerificationResult.Outcome;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.context.PropertyContext;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.AbstractSessionFactoryProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;

@@ -37,17 +35,11 @@ import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.VerifiableProcessor;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.aws.credentials.provider.PropertiesCredentialsProvider;
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderService;
import org.apache.nifi.proxy.ProxyConfiguration;
import org.apache.nifi.proxy.ProxyConfigurationService;
import org.apache.nifi.proxy.ProxySpec;
import org.apache.nifi.ssl.SSLContextService;
import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.awscore.client.builder.AwsClientBuilder;
import software.amazon.awssdk.core.SdkClient;
import software.amazon.awssdk.core.client.builder.SdkClientBuilder;

@@ -58,16 +50,13 @@ import software.amazon.awssdk.http.TlsKeyManagersProvider;
import software.amazon.awssdk.regions.Region;

import javax.net.ssl.TrustManager;
import java.io.File;
import java.net.Proxy;
import java.net.Proxy.Type;
import java.net.URI;
import java.nio.file.Path;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

@@ -81,6 +70,29 @@ import java.util.concurrent.TimeUnit;
 * @see <a href="https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/auth/credentials/AwsCredentialsProvider.html">AwsCredentialsProvider</a>
 */
public abstract class AbstractAwsProcessor<T extends SdkClient> extends AbstractSessionFactoryProcessor implements VerifiableProcessor {
    private static final String CREDENTIALS_SERVICE_CLASSNAME = "org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService";
    private static final String PROXY_SERVICE_CLASSNAME = "org.apache.nifi.proxy.StandardProxyConfigurationService";

    // Obsolete property names
    private static final String OBSOLETE_ACCESS_KEY = "Access Key";
    private static final String OBSOLETE_SECRET_KEY = "Secret Key";
    private static final String OBSOLETE_CREDENTIALS_FILE = "Credentials File";
    private static final String OBSOLETE_PROXY_HOST = "Proxy Host";
    private static final String OBSOLETE_PROXY_PORT = "Proxy Host Port";
    private static final String OBSOLETE_PROXY_USERNAME = "proxy-user-name";
    private static final String OBSOLETE_PROXY_PASSWORD = "proxy-user-password";

    // Controller Service property names
    private static final String AUTH_SERVICE_ACCESS_KEY = "Access Key";
    private static final String AUTH_SERVICE_SECRET_KEY = "Secret Key";
    private static final String AUTH_SERVICE_CREDENTIALS_FILE = "Credentials File";
    private static final String AUTH_SERVICE_DEFAULT_CREDENTIALS = "default-credentials";
    private static final String PROXY_SERVICE_HOST = "proxy-server-host";
    private static final String PROXY_SERVICE_PORT = "proxy-server-port";
    private static final String PROXY_SERVICE_USERNAME = "proxy-user-name";
    private static final String PROXY_SERVICE_PASSWORD = "proxy-user-password";
    private static final String PROXY_SERVICE_TYPE = "proxy-type";


    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")

@@ -92,50 +104,7 @@ public abstract class AbstractAwsProcessor<T extends SdkClient> extends Abstract
            .description("FlowFiles are routed to failure relationship")
            .build();

    private static final Set<Relationship> relationships = Collections.unmodifiableSet(
            new LinkedHashSet<>(Arrays.asList(REL_SUCCESS, REL_FAILURE))
    );

    public static final PropertyDescriptor CREDENTIALS_FILE = CredentialPropertyDescriptors.CREDENTIALS_FILE;

    public static final PropertyDescriptor ACCESS_KEY = CredentialPropertyDescriptors.ACCESS_KEY_ID;

    public static final PropertyDescriptor SECRET_KEY = CredentialPropertyDescriptors.SECRET_KEY;

    public static final PropertyDescriptor PROXY_HOST = new PropertyDescriptor.Builder()
            .name("Proxy Host")
            .description("Proxy host name or IP")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PROXY_HOST_PORT = new PropertyDescriptor.Builder()
            .name("Proxy Host Port")
            .description("Proxy host port")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.PORT_VALIDATOR)
            .build();

    public static final PropertyDescriptor PROXY_USERNAME = new PropertyDescriptor.Builder()
            .name("proxy-user-name")
            .displayName("Proxy Username")
            .description("Proxy username")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PROXY_PASSWORD = new PropertyDescriptor.Builder()
            .name("proxy-user-password")
            .displayName("Proxy Password")
            .description("Proxy password")
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .sensitive(true)
            .build();
    private static final Set<Relationship> relationships = Set.of(REL_SUCCESS, REL_FAILURE);

    public static final PropertyDescriptor REGION = new PropertyDescriptor.Builder()
            .name("Region")

@@ -168,26 +137,25 @@ public abstract class AbstractAwsProcessor<T extends SdkClient> extends Abstract
            .addValidator(StandardValidators.URL_VALIDATOR)
            .build();

    /**
     * AWS credentials provider service
     *
     * @see <a href="http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/AWSCredentialsProvider.html">AWSCredentialsProvider</a>
     * @see <a href="https://sdk.amazonaws.com/java/api/2.0.0/software/amazon/awssdk/auth/credentials/AwsCredentialsProvider.html">AwsCredentialsProvider</a>
     */
    public static final PropertyDescriptor AWS_CREDENTIALS_PROVIDER_SERVICE = new PropertyDescriptor.Builder()
            .name("AWS Credentials Provider service")
            .displayName("AWS Credentials Provider Service")
            .description("The Controller Service that is used to obtain AWS credentials provider")
            .required(false)
            .identifiesControllerService(AWSCredentialsProviderService.class)
            .build();
            .name("AWS Credentials Provider service")
            .displayName("AWS Credentials Provider Service")
            .description("The Controller Service that is used to obtain AWS credentials provider")
            .required(true)
            .identifiesControllerService(AWSCredentialsProviderService.class)
            .build();

    public static final PropertyDescriptor PROXY_CONFIGURATION_SERVICE = new PropertyDescriptor.Builder()
            .name("proxy-configuration-service")
            .displayName("Proxy Configuration Service")
            .description("Specifies the Proxy Configuration Controller Service to proxy network requests.")
            .identifiesControllerService(ProxyConfigurationService.class)
            .required(false)
            .build();


    protected static final String DEFAULT_USER_AGENT = "NiFi";

    private static final ProxySpec[] PROXY_SPECS = {ProxySpec.HTTP_AUTH};

    public static final PropertyDescriptor PROXY_CONFIGURATION_SERVICE = ProxyConfiguration.createProxyConfigPropertyDescriptor(true, PROXY_SPECS);

    private final Cache<Region, T> clientCache = Caffeine.newBuilder().build();

    /**

@@ -226,47 +194,53 @@ public abstract class AbstractAwsProcessor<T extends SdkClient> extends Abstract
    }

    @Override
    protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
        final List<ValidationResult> validationResults = new ArrayList<>(super.customValidate(validationContext));
    public void migrateProperties(final PropertyConfiguration config) {
        migrateAuthenticationProperties(config);
        migrateProxyProperties(config);
    }

        final boolean accessKeySet = validationContext.getProperty(ACCESS_KEY).isSet();
        final boolean secretKeySet = validationContext.getProperty(SECRET_KEY).isSet();
        if ((accessKeySet && !secretKeySet) || (secretKeySet && !accessKeySet)) {
            validationResults.add(new ValidationResult.Builder().input("Access Key").valid(false).explanation("If setting Secret Key or Access Key, must set both").build());
    private void migrateAuthenticationProperties(final PropertyConfiguration config) {
        if (config.isPropertySet(OBSOLETE_ACCESS_KEY) && config.isPropertySet(OBSOLETE_SECRET_KEY)) {
            final String serviceId = config.createControllerService(CREDENTIALS_SERVICE_CLASSNAME, Map.of(
                AUTH_SERVICE_ACCESS_KEY, config.getRawPropertyValue(OBSOLETE_ACCESS_KEY).get(),
                AUTH_SERVICE_SECRET_KEY, config.getRawPropertyValue(OBSOLETE_SECRET_KEY).get()));

            config.setProperty(AWS_CREDENTIALS_PROVIDER_SERVICE.getName(), serviceId);
        } else if (config.isPropertySet(OBSOLETE_CREDENTIALS_FILE)) {
            final String serviceId = config.createControllerService(CREDENTIALS_SERVICE_CLASSNAME, Map.of(
                AUTH_SERVICE_CREDENTIALS_FILE, config.getRawPropertyValue(OBSOLETE_CREDENTIALS_FILE).get()));

            config.setProperty(AWS_CREDENTIALS_PROVIDER_SERVICE, serviceId);
        } else if (!config.isPropertySet(AWS_CREDENTIALS_PROVIDER_SERVICE)) {
            final String serviceId = config.createControllerService(CREDENTIALS_SERVICE_CLASSNAME, Map.of(
                AUTH_SERVICE_DEFAULT_CREDENTIALS, "true"));
            config.setProperty(AWS_CREDENTIALS_PROVIDER_SERVICE, serviceId);
        }
|
||||
final boolean credentialsFileSet = validationContext.getProperty(CREDENTIALS_FILE).isSet();
|
||||
if ((secretKeySet || accessKeySet) && credentialsFileSet) {
|
||||
validationResults.add(new ValidationResult.Builder().input("Access Key").valid(false).explanation("Cannot set both Credentials File and Secret Key/Access Key").build());
|
||||
config.removeProperty(OBSOLETE_ACCESS_KEY);
|
||||
config.removeProperty(OBSOLETE_SECRET_KEY);
|
||||
config.removeProperty(OBSOLETE_CREDENTIALS_FILE);
|
||||
}
|
||||
|
||||
private void migrateProxyProperties(final PropertyConfiguration config) {
|
||||
if (config.isPropertySet(OBSOLETE_PROXY_HOST)) {
|
||||
final Map<String, String> proxyProperties = new HashMap<>();
|
||||
proxyProperties.put(PROXY_SERVICE_TYPE, Type.HTTP.name());
|
||||
proxyProperties.put(PROXY_SERVICE_HOST, config.getRawPropertyValue(OBSOLETE_PROXY_HOST).get());
|
||||
|
||||
// Map any optional proxy configs
|
||||
config.getRawPropertyValue(OBSOLETE_PROXY_PORT).ifPresent(value -> proxyProperties.put(PROXY_SERVICE_PORT, value));
|
||||
config.getRawPropertyValue(OBSOLETE_PROXY_USERNAME).ifPresent(value -> proxyProperties.put(PROXY_SERVICE_USERNAME, value));
|
||||
config.getRawPropertyValue(OBSOLETE_PROXY_PASSWORD).ifPresent(value -> proxyProperties.put(PROXY_SERVICE_PASSWORD, value));
|
||||
|
||||
final String serviceId = config.createControllerService(PROXY_SERVICE_CLASSNAME, proxyProperties);
|
||||
config.setProperty(PROXY_CONFIGURATION_SERVICE, serviceId);
|
||||
}
|
||||
|
||||
final boolean proxyHostSet = validationContext.getProperty(PROXY_HOST).isSet();
|
||||
final boolean proxyPortSet = validationContext.getProperty(PROXY_HOST_PORT).isSet();
|
||||
final boolean proxyConfigServiceSet = validationContext.getProperty(ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE).isSet();
|
||||
|
||||
if ((proxyHostSet && !proxyPortSet) || (!proxyHostSet && proxyPortSet)) {
|
||||
validationResults.add(new ValidationResult.Builder().subject("Proxy Host and Port").valid(false).explanation("If Proxy Host or Proxy Port is set, both must be set").build());
|
||||
}
|
||||
|
||||
final boolean proxyUserSet = validationContext.getProperty(PROXY_USERNAME).isSet();
|
||||
final boolean proxyPwdSet = validationContext.getProperty(PROXY_PASSWORD).isSet();
|
||||
|
||||
if ((proxyUserSet && !proxyPwdSet) || (!proxyUserSet && proxyPwdSet)) {
|
||||
validationResults.add(new ValidationResult.Builder().subject("Proxy User and Password").valid(false).explanation("If Proxy Username or Proxy Password is set, both must be set").build());
|
||||
}
|
||||
|
||||
if (proxyUserSet && !proxyHostSet) {
|
||||
validationResults.add(new ValidationResult.Builder().subject("Proxy").valid(false).explanation("If Proxy Username or Proxy Password").build());
|
||||
}
|
||||
|
||||
ProxyConfiguration.validateProxySpec(validationContext, validationResults, PROXY_SPECS);
|
||||
|
||||
if (proxyHostSet && proxyConfigServiceSet) {
|
||||
validationResults.add(new ValidationResult.Builder().subject("Proxy Configuration Service").valid(false)
|
||||
.explanation("Either Proxy Username and Proxy Password must be set or Proxy Configuration Service but not both").build());
|
||||
}
|
||||
|
||||
return validationResults;
|
||||
config.removeProperty(OBSOLETE_PROXY_HOST);
|
||||
config.removeProperty(OBSOLETE_PROXY_PORT);
|
||||
config.removeProperty(OBSOLETE_PROXY_USERNAME);
|
||||
config.removeProperty(OBSOLETE_PROXY_PASSWORD);
|
||||
}
|
||||
|
||||
@OnScheduled
|
||||
|
@ -350,19 +324,7 @@ public abstract class AbstractAwsProcessor<T extends SdkClient> extends Abstract
|
|||
}
|
||||
|
||||
final ProxyConfiguration proxyConfig = ProxyConfiguration.getConfiguration(context, () -> {
|
||||
if (context.getProperty(PROXY_HOST).isSet()) {
|
||||
final ProxyConfiguration componentProxyConfig = new ProxyConfiguration();
|
||||
final String proxyHost = context.getProperty(PROXY_HOST).evaluateAttributeExpressions().getValue();
|
||||
final Integer proxyPort = context.getProperty(PROXY_HOST_PORT).evaluateAttributeExpressions().asInteger();
|
||||
final String proxyUsername = context.getProperty(PROXY_USERNAME).evaluateAttributeExpressions().getValue();
|
||||
final String proxyPassword = context.getProperty(PROXY_PASSWORD).evaluateAttributeExpressions().getValue();
|
||||
componentProxyConfig.setProxyType(Proxy.Type.HTTP);
|
||||
componentProxyConfig.setProxyServerHost(proxyHost);
|
||||
componentProxyConfig.setProxyServerPort(proxyPort);
|
||||
componentProxyConfig.setProxyUserName(proxyUsername);
|
||||
componentProxyConfig.setProxyUserPassword(proxyPassword);
|
||||
return componentProxyConfig;
|
||||
} else if (context.getProperty(ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE).isSet()) {
|
||||
if (context.getProperty(ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE).isSet()) {
|
||||
final ProxyConfigurationService configurationService = context.getProperty(ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE).asControllerService(ProxyConfigurationService.class);
|
||||
return configurationService.getConfiguration();
|
||||
}
|
||||
|
@ -411,28 +373,8 @@ public abstract class AbstractAwsProcessor<T extends SdkClient> extends Abstract
|
|||
* @return AwsCredentialsProvider the credential provider
|
||||
*/
|
||||
protected AwsCredentialsProvider getCredentialsProvider(final ProcessContext context) {
|
||||
final AWSCredentialsProviderService awsCredentialsProviderService =
|
||||
context.getProperty(AWS_CREDENTIALS_PROVIDER_SERVICE).asControllerService(AWSCredentialsProviderService.class);
|
||||
|
||||
return awsCredentialsProviderService != null ? awsCredentialsProviderService.getAwsCredentialsProvider() : createStaticCredentialsProvider(context);
|
||||
|
||||
}
|
||||
|
||||
protected AwsCredentialsProvider createStaticCredentialsProvider(final PropertyContext context) {
|
||||
final String accessKey = context.getProperty(ACCESS_KEY).evaluateAttributeExpressions().getValue();
|
||||
final String secretKey = context.getProperty(SECRET_KEY).evaluateAttributeExpressions().getValue();
|
||||
|
||||
final String credentialsFile = context.getProperty(CREDENTIALS_FILE).getValue();
|
||||
|
||||
if (credentialsFile != null) {
|
||||
return new PropertiesCredentialsProvider(new File(credentialsFile));
|
||||
}
|
||||
|
||||
if (accessKey != null && secretKey != null) {
|
||||
return StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKey, secretKey));
|
||||
}
|
||||
|
||||
return AnonymousCredentialsProvider.create();
|
||||
final AWSCredentialsProviderService awsCredentialsProviderService = context.getProperty(AWS_CREDENTIALS_PROVIDER_SERVICE).asControllerService(AWSCredentialsProviderService.class);
|
||||
return awsCredentialsProviderService.getAwsCredentialsProvider();
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -147,6 +147,12 @@
|
|||
<version>1.19.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.nifi</groupId>
|
||||
<artifactId>nifi-proxy-configuration</artifactId>
|
||||
<version>2.0.0-SNAPSHOT</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
|
|
@ -188,12 +188,22 @@ public class PutCloudWatchMetric extends AbstractAwsSyncProcessor<CloudWatchClie
|
|||
.addValidator(DOUBLE_VALIDATOR)
|
||||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties =
|
||||
Collections.unmodifiableList(
|
||||
Arrays.asList(NAMESPACE, METRIC_NAME, VALUE, MAXIMUM, MINIMUM, SAMPLECOUNT, SUM, TIMESTAMP,
|
||||
UNIT, REGION, ACCESS_KEY, SECRET_KEY, CREDENTIALS_FILE, AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
TIMEOUT, SSL_CONTEXT_SERVICE, ENDPOINT_OVERRIDE, PROXY_HOST, PROXY_HOST_PORT, PROXY_USERNAME, PROXY_PASSWORD)
|
||||
);
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
NAMESPACE,
|
||||
METRIC_NAME,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
VALUE,
|
||||
MAXIMUM,
|
||||
MINIMUM,
|
||||
SAMPLECOUNT,
|
||||
SUM,
|
||||
TIMESTAMP,
|
||||
UNIT,
|
||||
TIMEOUT,
|
||||
SSL_CONTEXT_SERVICE,
|
||||
ENDPOINT_OVERRIDE,
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
private volatile Set<String> dynamicPropertyNames = new HashSet<>();
|
||||
|
||||
|
@ -237,7 +247,7 @@ public class PutCloudWatchMetric extends AbstractAwsSyncProcessor<CloudWatchClie
|
|||
|
||||
@Override
|
||||
protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
|
||||
Collection<ValidationResult> problems = super.customValidate(validationContext);
|
||||
List<ValidationResult> problems = new ArrayList<>(super.customValidate(validationContext));
|
||||
|
||||
final boolean valueSet = validationContext.getProperty(VALUE).isSet();
|
||||
final boolean maxSet = validationContext.getProperty(MAXIMUM).isSet();
|
||||
|
@ -249,16 +259,25 @@ public class PutCloudWatchMetric extends AbstractAwsSyncProcessor<CloudWatchClie
|
|||
final boolean anyStatisticSetValue = (maxSet || minSet || sampleCountSet || sumSet);
|
||||
|
||||
if (valueSet && anyStatisticSetValue) {
|
||||
problems.add(new ValidationResult.Builder().subject("Metric").valid(false)
|
||||
.explanation("Cannot set both Value and StatisticSet(Maximum, Minimum, SampleCount, Sum) properties").build());
|
||||
problems.add(new ValidationResult.Builder()
|
||||
.subject("Metric")
|
||||
.valid(false)
|
||||
.explanation("Cannot set both Value and StatisticSet(Maximum, Minimum, SampleCount, Sum) properties")
|
||||
.build());
|
||||
} else if (!valueSet && !completeStatisticSet) {
|
||||
problems.add(new ValidationResult.Builder().subject("Metric").valid(false)
|
||||
.explanation("Must set either Value or complete StatisticSet(Maximum, Minimum, SampleCount, Sum) properties").build());
|
||||
problems.add(new ValidationResult.Builder()
|
||||
.subject("Metric")
|
||||
.valid(false)
|
||||
.explanation("Must set either Value or complete StatisticSet(Maximum, Minimum, SampleCount, Sum) properties")
|
||||
.build());
|
||||
}
|
||||
|
||||
if (dynamicPropertyNames.size() > 10) {
|
||||
problems.add(new ValidationResult.Builder().subject("Metric").valid(false)
|
||||
.explanation("Cannot set more than 10 dimensions").build());
|
||||
problems.add(new ValidationResult.Builder()
|
||||
.subject("Metric")
|
||||
.valid(false)
|
||||
.explanation("Cannot set more than 10 dimensions")
|
||||
.build());
|
||||
}
|
||||
|
||||
return problems;
|
||||
|
|
|
@ -1,139 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.nifi.processors.aws.credentials.provider.factory;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.nifi.components.ValidationContext;
|
||||
import org.apache.nifi.components.ValidationResult;
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.ExplicitDefaultCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.AccessKeyPairCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.FileCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.NamedProfileCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.AnonymousCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.ImplicitDefaultCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.AssumeRoleCredentialsStrategy;
|
||||
|
||||
import com.amazonaws.auth.AWSCredentialsProvider;
|
||||
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
||||
|
||||
|
||||
/**
|
||||
* Generates AWS credentials in the form of AWSCredentialsProvider implementations for processors
|
||||
* and controller services. The factory supports a number of strategies for specifying and validating
|
||||
* AWS credentials, interpreted as an ordered list of most-preferred to least-preferred. It also supports
|
||||
* derived credential strategies like Assume Role, which require a primary credential as an input.
|
||||
*
|
||||
* Additional strategies should implement CredentialsStrategy, then be added to the strategies list in the
|
||||
* constructor.
|
||||
*
|
||||
* @see org.apache.nifi.processors.aws.credentials.provider.factory.strategies
|
||||
*/
|
||||
public class CredentialsProviderFactory {
|
||||
|
||||
private final List<CredentialsStrategy> strategies = new ArrayList<CredentialsStrategy>();
|
||||
|
||||
public CredentialsProviderFactory() {
|
||||
// Primary Credential Strategies
|
||||
strategies.add(new ExplicitDefaultCredentialsStrategy());
|
||||
strategies.add(new AccessKeyPairCredentialsStrategy());
|
||||
strategies.add(new FileCredentialsStrategy());
|
||||
strategies.add(new NamedProfileCredentialsStrategy());
|
||||
strategies.add(new AnonymousCredentialsStrategy());
|
||||
|
||||
// Implicit Default is the catch-all primary strategy
|
||||
strategies.add(new ImplicitDefaultCredentialsStrategy());
|
||||
|
||||
// Derived Credential Strategies
|
||||
strategies.add(new AssumeRoleCredentialsStrategy());
|
||||
}
|
||||
|
||||
public CredentialsStrategy selectPrimaryStrategy(final PropertyContext propertyContext) {
|
||||
for (CredentialsStrategy strategy : strategies) {
|
||||
if (strategy.canCreatePrimaryCredential(propertyContext)) {
|
||||
return strategy;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates AWS credential properties against the configured strategies to report any validation errors.
|
||||
* @return Validation errors
|
||||
*/
|
||||
public Collection<ValidationResult> validate(final ValidationContext validationContext) {
|
||||
final CredentialsStrategy selectedStrategy = selectPrimaryStrategy(validationContext);
|
||||
final ArrayList<ValidationResult> validationFailureResults = new ArrayList<ValidationResult>();
|
||||
|
||||
for (CredentialsStrategy strategy : strategies) {
|
||||
final Collection<ValidationResult> strategyValidationFailures = strategy.validate(validationContext,
|
||||
selectedStrategy);
|
||||
if (strategyValidationFailures != null) {
|
||||
validationFailureResults.addAll(strategyValidationFailures);
|
||||
}
|
||||
}
|
||||
|
||||
return validationFailureResults;
|
||||
}
|
||||
|
||||
/**
|
||||
* Produces the AWSCredentialsProvider according to the given property set and the strategies configured in
|
||||
* the factory.
|
||||
* @return AWSCredentialsProvider implementation
|
||||
*/
|
||||
public AWSCredentialsProvider getCredentialsProvider(final PropertyContext propertyContext) {
|
||||
final CredentialsStrategy primaryStrategy = selectPrimaryStrategy(propertyContext);
|
||||
AWSCredentialsProvider primaryCredentialsProvider = primaryStrategy.getCredentialsProvider(propertyContext);
|
||||
AWSCredentialsProvider derivedCredentialsProvider = null;
|
||||
|
||||
for (CredentialsStrategy strategy : strategies) {
|
||||
if (strategy.canCreateDerivedCredential(propertyContext)) {
|
||||
derivedCredentialsProvider = strategy.getDerivedCredentialsProvider(propertyContext, primaryCredentialsProvider);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (derivedCredentialsProvider != null) {
|
||||
return derivedCredentialsProvider;
|
||||
} else {
|
||||
return primaryCredentialsProvider;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Produces the AwsCredentialsProvider according to the given property set and the strategies configured in
|
||||
* the factory.
|
||||
* @return AwsCredentialsProvider implementation
|
||||
*/
|
||||
public AwsCredentialsProvider getAwsCredentialsProvider(final PropertyContext propertyContext) {
|
||||
final CredentialsStrategy primaryStrategy = selectPrimaryStrategy(propertyContext);
|
||||
final AwsCredentialsProvider primaryCredentialsProvider = primaryStrategy.getAwsCredentialsProvider(propertyContext);
|
||||
AwsCredentialsProvider derivedCredentialsProvider = null;
|
||||
|
||||
for (final CredentialsStrategy strategy : strategies) {
|
||||
if (strategy.canCreateDerivedCredential(propertyContext)) {
|
||||
derivedCredentialsProvider = strategy.getDerivedAwsCredentialsProvider(propertyContext, primaryCredentialsProvider);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return derivedCredentialsProvider == null ? primaryCredentialsProvider : derivedCredentialsProvider;
|
||||
}
|
||||
}
|
|
@ -16,9 +16,6 @@
|
|||
*/
|
||||
package org.apache.nifi.processors.aws.credentials.provider.factory.strategies;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.components.PropertyValue;
|
||||
import org.apache.nifi.components.ValidationContext;
|
||||
|
@ -26,6 +23,9 @@ import org.apache.nifi.components.ValidationResult;
|
|||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialsStrategy;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
|
||||
/**
|
||||
* Partial implementation of CredentialsStrategy to provide support for credential strategies specified by
|
||||
|
@ -33,7 +33,7 @@ import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialsSt
|
|||
*/
|
||||
public abstract class AbstractBooleanCredentialsStrategy extends AbstractCredentialsStrategy {
|
||||
|
||||
private PropertyDescriptor strategyProperty;
|
||||
private final PropertyDescriptor strategyProperty;
|
||||
|
||||
public AbstractBooleanCredentialsStrategy(final String name, final PropertyDescriptor strategyProperty) {
|
||||
super("Default Credentials", new PropertyDescriptor[]{
|
||||
|
@ -52,7 +52,7 @@ public abstract class AbstractBooleanCredentialsStrategy extends AbstractCredent
|
|||
strategyPropertyValue = strategyPropertyValue.evaluateAttributeExpressions();
|
||||
}
|
||||
final String useStrategyString = strategyPropertyValue.getValue();
|
||||
final Boolean useStrategy = Boolean.parseBoolean(useStrategyString);
|
||||
final boolean useStrategy = Boolean.parseBoolean(useStrategyString);
|
||||
return useStrategy;
|
||||
}
|
||||
|
||||
|
@ -61,17 +61,15 @@ public abstract class AbstractBooleanCredentialsStrategy extends AbstractCredent
|
|||
final CredentialsStrategy primaryStrategy) {
|
||||
final boolean thisIsSelectedStrategy = this == primaryStrategy;
|
||||
final Boolean useStrategy = validationContext.getProperty(strategyProperty).asBoolean();
|
||||
|
||||
if (!thisIsSelectedStrategy && useStrategy) {
|
||||
final String failureFormat = "property %1$s cannot be used with %2$s";
|
||||
final Collection<ValidationResult> validationFailureResults = new ArrayList<ValidationResult>();
|
||||
final String message = String.format(failureFormat, strategyProperty.getDisplayName(),
|
||||
primaryStrategy.getName());
|
||||
validationFailureResults.add(new ValidationResult.Builder()
|
||||
.subject(strategyProperty.getDisplayName())
|
||||
.valid(false)
|
||||
.explanation(message).build());
|
||||
return validationFailureResults;
|
||||
return Collections.singleton(new ValidationResult.Builder()
|
||||
.subject(strategyProperty.getDisplayName())
|
||||
.valid(false)
|
||||
.explanation(String.format("property %1$s cannot be used with %2$s", strategyProperty.getDisplayName(), primaryStrategy.getName()))
|
||||
.build());
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ import com.amazonaws.auth.AWSStaticCredentialsProvider;
|
|||
import com.amazonaws.auth.BasicAWSCredentials;
|
||||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
|
||||
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
|
||||
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
||||
|
||||
|
@ -36,23 +36,23 @@ public class AccessKeyPairCredentialsStrategy extends AbstractCredentialsStrateg
|
|||
|
||||
public AccessKeyPairCredentialsStrategy() {
|
||||
super("Access Key Pair", new PropertyDescriptor[] {
|
||||
CredentialPropertyDescriptors.ACCESS_KEY_ID,
|
||||
CredentialPropertyDescriptors.SECRET_KEY
|
||||
AWSCredentialsProviderControllerService.ACCESS_KEY_ID,
|
||||
AWSCredentialsProviderControllerService.SECRET_KEY
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public AWSCredentialsProvider getCredentialsProvider(final PropertyContext propertyContext) {
|
||||
final String accessKey = propertyContext.getProperty(CredentialPropertyDescriptors.ACCESS_KEY_ID).evaluateAttributeExpressions().getValue();
|
||||
final String secretKey = propertyContext.getProperty(CredentialPropertyDescriptors.SECRET_KEY).evaluateAttributeExpressions().getValue();
|
||||
final String accessKey = propertyContext.getProperty(AWSCredentialsProviderControllerService.ACCESS_KEY_ID).evaluateAttributeExpressions().getValue();
|
||||
final String secretKey = propertyContext.getProperty(AWSCredentialsProviderControllerService.SECRET_KEY).evaluateAttributeExpressions().getValue();
|
||||
final BasicAWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
|
||||
return new AWSStaticCredentialsProvider(credentials);
|
||||
}
|
||||
|
||||
@Override
|
||||
public AwsCredentialsProvider getAwsCredentialsProvider(final PropertyContext propertyContext) {
|
||||
final String accessKey = propertyContext.getProperty(CredentialPropertyDescriptors.ACCESS_KEY_ID).evaluateAttributeExpressions().getValue();
|
||||
final String secretKey = propertyContext.getProperty(CredentialPropertyDescriptors.SECRET_KEY).evaluateAttributeExpressions().getValue();
|
||||
final String accessKey = propertyContext.getProperty(AWSCredentialsProviderControllerService.ACCESS_KEY_ID).evaluateAttributeExpressions().getValue();
|
||||
final String secretKey = propertyContext.getProperty(AWSCredentialsProviderControllerService.SECRET_KEY).evaluateAttributeExpressions().getValue();
|
||||
return software.amazon.awssdk.auth.credentials.StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKey, secretKey));
|
||||
}
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ import com.amazonaws.auth.AWSCredentialsProvider;
|
|||
import com.amazonaws.auth.AWSStaticCredentialsProvider;
|
||||
import com.amazonaws.auth.AnonymousAWSCredentials;
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
|
||||
import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
|
||||
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
||||
|
||||
|
@ -34,7 +34,7 @@ import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
|||
public class AnonymousCredentialsStrategy extends AbstractBooleanCredentialsStrategy {
|
||||
|
||||
public AnonymousCredentialsStrategy() {
|
||||
super("Anonymous Credentials", CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS);
|
||||
super("Anonymous Credentials", AWSCredentialsProviderControllerService.USE_ANONYMOUS_CREDENTIALS);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -45,17 +45,17 @@ import java.time.Duration;
|
|||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_ARN;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_EXTERNAL_ID;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_NAME;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_HOST;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_PORT;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_SSL_CONTEXT_SERVICE;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_ENDPOINT;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_REGION;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_SIGNER_OVERRIDE;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.MAX_SESSION_TIME;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_EXTERNAL_ID;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_PROXY_HOST;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_PROXY_PORT;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_SSL_CONTEXT_SERVICE;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_ENDPOINT;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_REGION;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_SIGNER_OVERRIDE;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.MAX_SESSION_TIME;
|
||||
import static org.apache.nifi.processors.aws.signer.AwsSignerType.CUSTOM_SIGNER;
|
||||
import static org.apache.nifi.processors.aws.signer.AwsSignerType.DEFAULT_SIGNER;
|
||||
|
||||
|
|
|
@ -16,11 +16,10 @@
|
|||
*/
|
||||
package org.apache.nifi.processors.aws.credentials.provider.factory.strategies;
|
||||
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
|
||||
|
||||
import com.amazonaws.auth.AWSCredentialsProvider;
|
||||
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
|
||||
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
||||
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
|
||||
|
||||
|
@ -35,7 +34,7 @@ import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
|
|||
public class ExplicitDefaultCredentialsStrategy extends AbstractBooleanCredentialsStrategy {
|
||||
|
||||
public ExplicitDefaultCredentialsStrategy() {
|
||||
super("Default Credentials", CredentialPropertyDescriptors.USE_DEFAULT_CREDENTIALS);
|
||||
super("Default Credentials", AWSCredentialsProviderControllerService.USE_DEFAULT_CREDENTIALS);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -21,7 +21,7 @@ import com.amazonaws.auth.PropertiesFileCredentialsProvider;
|
|||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.PropertiesCredentialsProvider;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
|
||||
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
||||
|
||||
import java.io.File;
|
||||
|
@ -42,19 +42,19 @@ public class FileCredentialsStrategy extends AbstractCredentialsStrategy {
|
|||
|
||||
public FileCredentialsStrategy() {
|
||||
super("Credentials File", new PropertyDescriptor[] {
|
||||
CredentialPropertyDescriptors.CREDENTIALS_FILE
|
||||
AWSCredentialsProviderControllerService.CREDENTIALS_FILE
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public AWSCredentialsProvider getCredentialsProvider(final PropertyContext propertyContext) {
|
||||
final String credentialsFile = propertyContext.getProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE).getValue();
|
||||
final String credentialsFile = propertyContext.getProperty(AWSCredentialsProviderControllerService.CREDENTIALS_FILE).getValue();
|
||||
return new PropertiesFileCredentialsProvider(credentialsFile);
|
||||
}
|
||||
|
||||
@Override
|
||||
public AwsCredentialsProvider getAwsCredentialsProvider(final PropertyContext propertyContext) {
|
||||
final String credentialsFile = propertyContext.getProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE).getValue();
|
||||
final String credentialsFile = propertyContext.getProperty(AWSCredentialsProviderControllerService.CREDENTIALS_FILE).getValue();
|
||||
return new PropertiesCredentialsProvider(new File(credentialsFile));
|
||||
}
|
||||
|
||||
|
|
|
@ -16,12 +16,11 @@
|
|||
*/
|
||||
package org.apache.nifi.processors.aws.credentials.provider.factory.strategies;
|
||||
|
||||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
|
||||
|
||||
import com.amazonaws.auth.AWSCredentialsProvider;
|
||||
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
|
||||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
|
||||
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
||||
|
||||
|
||||
|
@ -35,19 +34,19 @@ public class NamedProfileCredentialsStrategy extends AbstractCredentialsStrategy
|
|||
|
||||
public NamedProfileCredentialsStrategy() {
|
||||
super("Named Profile", new PropertyDescriptor[] {
|
||||
CredentialPropertyDescriptors.PROFILE_NAME
|
||||
AWSCredentialsProviderControllerService.PROFILE_NAME
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public AWSCredentialsProvider getCredentialsProvider(final PropertyContext propertyContext) {
|
||||
final String profileName = propertyContext.getProperty(CredentialPropertyDescriptors.PROFILE_NAME).evaluateAttributeExpressions().getValue();
|
||||
final String profileName = propertyContext.getProperty(AWSCredentialsProviderControllerService.PROFILE_NAME).evaluateAttributeExpressions().getValue();
|
||||
return new ProfileCredentialsProvider(profileName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public AwsCredentialsProvider getAwsCredentialsProvider(final PropertyContext propertyContext) {
|
||||
final String profileName = propertyContext.getProperty(CredentialPropertyDescriptors.PROFILE_NAME).evaluateAttributeExpressions().getValue();
|
||||
final String profileName = propertyContext.getProperty(AWSCredentialsProviderControllerService.PROFILE_NAME).evaluateAttributeExpressions().getValue();
|
||||
return software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider.create(profileName);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,41 +17,45 @@
|
|||
package org.apache.nifi.processors.aws.credentials.provider.service;
|
||||
|
||||
import com.amazonaws.auth.AWSCredentialsProvider;
|
||||
import com.amazonaws.auth.Signer;
|
||||
import org.apache.nifi.annotation.behavior.Restricted;
|
||||
import org.apache.nifi.annotation.behavior.Restriction;
|
||||
import org.apache.nifi.annotation.documentation.CapabilityDescription;
|
||||
import org.apache.nifi.annotation.documentation.Tags;
|
||||
import org.apache.nifi.annotation.lifecycle.OnEnabled;
|
||||
import org.apache.nifi.components.AllowableValue;
|
||||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.components.RequiredPermission;
|
||||
import org.apache.nifi.components.ValidationContext;
|
||||
import org.apache.nifi.components.ValidationResult;
|
||||
import org.apache.nifi.components.resource.ResourceCardinality;
|
||||
import org.apache.nifi.components.resource.ResourceType;
|
||||
import org.apache.nifi.context.PropertyContext;
|
||||
import org.apache.nifi.controller.AbstractControllerService;
|
||||
import org.apache.nifi.controller.ConfigurationContext;
|
||||
import org.apache.nifi.expression.ExpressionLanguageScope;
|
||||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialsProviderFactory;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.AccessKeyPairCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.AnonymousCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.AssumeRoleCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.ExplicitDefaultCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.FileCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.ImplicitDefaultCredentialsStrategy;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.factory.strategies.NamedProfileCredentialsStrategy;
|
||||
import org.apache.nifi.ssl.SSLContextService;
|
||||
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
||||
import software.amazon.awssdk.regions.Region;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ACCESS_KEY_ID;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_EXTERNAL_ID;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_HOST;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_PORT;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_SSL_CONTEXT_SERVICE;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_ENDPOINT;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_SIGNER_OVERRIDE;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.CREDENTIALS_FILE;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_CUSTOM_SIGNER_MODULE_LOCATION;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.PROFILE_NAME;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.SECRET_KEY;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS;
|
||||
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.USE_DEFAULT_CREDENTIALS;
|
||||
import static org.apache.nifi.processors.aws.signer.AwsSignerType.AWS_V4_SIGNER;
|
||||
import static org.apache.nifi.processors.aws.signer.AwsSignerType.CUSTOM_SIGNER;
|
||||
import static org.apache.nifi.processors.aws.signer.AwsSignerType.DEFAULT_SIGNER;
|
||||
|
||||
/**
|
||||
* Implementation of AWSCredentialsProviderService interface
|
||||
|
@ -64,48 +68,249 @@ import static org.apache.nifi.processors.aws.credentials.provider.factory.Creden
|
|||
"Additional options include access key / secret key pairs, credentials file, named profile, and assume role credentials.")
|
||||
@Tags({ "aws", "credentials","provider" })
|
||||
@Restricted(
|
||||
restrictions = {
|
||||
@Restriction(
|
||||
requiredPermission = RequiredPermission.ACCESS_ENVIRONMENT_CREDENTIALS,
|
||||
explanation = "The default configuration can read environment variables and system properties for credentials"
|
||||
)
|
||||
}
|
||||
restrictions = {
|
||||
@Restriction(
|
||||
requiredPermission = RequiredPermission.ACCESS_ENVIRONMENT_CREDENTIALS,
|
||||
explanation = "The default configuration can read environment variables and system properties for credentials"
|
||||
)
|
||||
}
|
||||
)
|
||||
public class AWSCredentialsProviderControllerService extends AbstractControllerService implements AWSCredentialsProviderService {
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_ARN = CredentialPropertyDescriptors.ASSUME_ROLE_ARN;
|
||||
public static final PropertyDescriptor ASSUME_ROLE_NAME = CredentialPropertyDescriptors.ASSUME_ROLE_NAME;
|
||||
public static final PropertyDescriptor MAX_SESSION_TIME = CredentialPropertyDescriptors.MAX_SESSION_TIME;
|
||||
public static final PropertyDescriptor ASSUME_ROLE_STS_REGION = CredentialPropertyDescriptors.ASSUME_ROLE_STS_REGION;
|
||||
public static final PropertyDescriptor USE_DEFAULT_CREDENTIALS = new PropertyDescriptor.Builder()
|
||||
.name("default-credentials")
|
||||
.displayName("Use Default Credentials")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.allowableValues("true", "false")
|
||||
.defaultValue("false")
|
||||
.description("If true, uses the Default Credential chain, including EC2 instance profiles or roles, " +
|
||||
"environment variables, default user credentials, etc.")
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES;
|
||||
public static final PropertyDescriptor PROFILE_NAME = new PropertyDescriptor.Builder()
|
||||
.name("profile-name")
|
||||
.displayName("Profile Name")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.description("The AWS profile name for credentials from the profile configuration file.")
|
||||
.build();
|
||||
|
||||
static {
|
||||
final List<PropertyDescriptor> props = new ArrayList<>();
|
||||
props.add(USE_DEFAULT_CREDENTIALS);
|
||||
props.add(ACCESS_KEY_ID);
|
||||
props.add(SECRET_KEY);
|
||||
props.add(CREDENTIALS_FILE);
|
||||
props.add(PROFILE_NAME);
|
||||
props.add(USE_ANONYMOUS_CREDENTIALS);
|
||||
props.add(ASSUME_ROLE_ARN);
|
||||
props.add(ASSUME_ROLE_NAME);
|
||||
props.add(MAX_SESSION_TIME);
|
||||
props.add(ASSUME_ROLE_EXTERNAL_ID);
|
||||
props.add(ASSUME_ROLE_SSL_CONTEXT_SERVICE);
|
||||
props.add(ASSUME_ROLE_PROXY_HOST);
|
||||
props.add(ASSUME_ROLE_PROXY_PORT);
|
||||
props.add(ASSUME_ROLE_STS_REGION);
|
||||
props.add(ASSUME_ROLE_STS_ENDPOINT);
|
||||
props.add(ASSUME_ROLE_STS_SIGNER_OVERRIDE);
|
||||
props.add(ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME);
|
||||
props.add(ASSUME_ROLE_STS_CUSTOM_SIGNER_MODULE_LOCATION);
|
||||
PROPERTIES = Collections.unmodifiableList(props);
|
||||
}
|
||||
public static final PropertyDescriptor CREDENTIALS_FILE = new PropertyDescriptor.Builder()
|
||||
.name("Credentials File")
|
||||
.displayName("Credentials File")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(false)
|
||||
.identifiesExternalResource(ResourceCardinality.SINGLE, ResourceType.FILE)
|
||||
.description("Path to a file containing AWS access key and secret key in properties file format.")
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ACCESS_KEY_ID = new PropertyDescriptor.Builder()
|
||||
.name("Access Key")
|
||||
.displayName("Access Key ID")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.sensitive(true)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor SECRET_KEY = new PropertyDescriptor.Builder()
|
||||
.name("Secret Key")
|
||||
.displayName("Secret Access Key")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.sensitive(true)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor USE_ANONYMOUS_CREDENTIALS = new PropertyDescriptor.Builder()
|
||||
.name("anonymous-credentials")
|
||||
.displayName("Use Anonymous Credentials")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.allowableValues("true", "false")
|
||||
.defaultValue("false")
|
||||
.description("If true, uses Anonymous credentials")
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_ARN = new PropertyDescriptor.Builder()
|
||||
.name("Assume Role ARN")
|
||||
.displayName("Assume Role ARN")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.description("The AWS Role ARN for cross account access. This is used in conjunction with Assume Role Session Name and other Assume Role properties.")
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_NAME = new PropertyDescriptor.Builder()
|
||||
.name("Assume Role Session Name")
|
||||
.displayName("Assume Role Session Name")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(true)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.description("The AWS Role Session Name for cross account access. This is used in conjunction with Assume Role ARN.")
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_STS_REGION = new PropertyDescriptor.Builder()
|
||||
.name("assume-role-sts-region")
|
||||
.displayName("Assume Role STS Region")
|
||||
.description("The AWS Security Token Service (STS) region")
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.allowableValues(getAvailableRegions())
|
||||
.defaultValue(createAllowableValue(Region.US_WEST_2).getValue())
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_EXTERNAL_ID = new PropertyDescriptor.Builder()
|
||||
.name("assume-role-external-id")
|
||||
.displayName("Assume Role External ID")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.description("External ID for cross-account access. This is used in conjunction with Assume Role ARN.")
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_SSL_CONTEXT_SERVICE = new PropertyDescriptor.Builder()
|
||||
.name("assume-role-ssl-context-service")
|
||||
.displayName("Assume Role SSL Context Service")
|
||||
.description("SSL Context Service used when connecting to the STS Endpoint.")
|
||||
.identifiesControllerService(SSLContextService.class)
|
||||
.required(false)
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.build();
|
||||
|
||||
/**
|
||||
* Assume Role Proxy variables for configuring proxy to retrieve keys
|
||||
*/
|
||||
public static final PropertyDescriptor ASSUME_ROLE_PROXY_HOST = new PropertyDescriptor.Builder()
|
||||
.name("assume-role-proxy-host")
|
||||
.displayName("Assume Role Proxy Host")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.description("Proxy host for cross-account access, if needed within your environment. This will configure a proxy to request for temporary access keys into another AWS account.")
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_PROXY_PORT = new PropertyDescriptor.Builder()
|
||||
.name("assume-role-proxy-port")
|
||||
.displayName("Assume Role Proxy Port")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.description("Proxy port for cross-account access, if needed within your environment. This will configure a proxy to request for temporary access keys into another AWS account.")
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_STS_ENDPOINT = new PropertyDescriptor.Builder()
|
||||
.name("assume-role-sts-endpoint")
|
||||
.displayName("Assume Role STS Endpoint Override")
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.description("The default AWS Security Token Service (STS) endpoint (\"sts.amazonaws.com\") works for " +
|
||||
"all accounts that are not for China (Beijing) region or GovCloud. You only need to set " +
|
||||
"this property to \"sts.cn-north-1.amazonaws.com.cn\" when you are requesting session credentials " +
|
||||
"for services in China(Beijing) region or to \"sts.us-gov-west-1.amazonaws.com\" for GovCloud.")
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.build();
|
||||
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_STS_SIGNER_OVERRIDE = new PropertyDescriptor.Builder()
|
||||
.name("assume-role-sts-signer-override")
|
||||
.displayName("Assume Role STS Signer Override")
|
||||
.description("The AWS STS library uses Signature Version 4 by default. This property allows you to plug in your own custom signer implementation.")
|
||||
.required(false)
|
||||
.allowableValues(EnumSet.of(DEFAULT_SIGNER, AWS_V4_SIGNER, CUSTOM_SIGNER))
|
||||
.defaultValue(DEFAULT_SIGNER.getValue())
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor MAX_SESSION_TIME = new PropertyDescriptor.Builder()
|
||||
.name("Session Time")
|
||||
.displayName("Assume Role Session Time")
|
||||
.description("Session time for role based session (between 900 and 3600 seconds). This is used in conjunction with Assume Role ARN.")
|
||||
.defaultValue("3600")
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
|
||||
.sensitive(false)
|
||||
.dependsOn(ASSUME_ROLE_ARN)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME = new PropertyDescriptor.Builder()
|
||||
.name("custom-signer-class-name")
|
||||
.displayName("Custom Signer Class Name")
|
||||
.description(String.format("Fully qualified class name of the custom signer class. The signer must implement %s interface.", Signer.class.getName()))
|
||||
.required(true)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
|
||||
.dependsOn(ASSUME_ROLE_STS_SIGNER_OVERRIDE, CUSTOM_SIGNER)
|
||||
.build();
|
||||
|
||||
public static final PropertyDescriptor ASSUME_ROLE_STS_CUSTOM_SIGNER_MODULE_LOCATION = new PropertyDescriptor.Builder()
|
||||
.name("custom-signer-module-location")
|
||||
.displayName("Custom Signer Module Location")
|
||||
.description("Comma-separated list of paths to files and/or directories which contain the custom signer's JAR file and its dependencies (if any).")
|
||||
.required(false)
|
||||
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
|
||||
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
|
||||
.identifiesExternalResource(ResourceCardinality.MULTIPLE, ResourceType.FILE, ResourceType.DIRECTORY)
|
||||
.dependsOn(ASSUME_ROLE_STS_SIGNER_OVERRIDE, CUSTOM_SIGNER)
|
||||
.dynamicallyModifiesClasspath(true)
|
||||
.build();
|
||||
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
USE_DEFAULT_CREDENTIALS,
|
||||
ACCESS_KEY_ID,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
PROFILE_NAME,
|
||||
USE_ANONYMOUS_CREDENTIALS,
|
||||
ASSUME_ROLE_ARN,
|
||||
ASSUME_ROLE_NAME,
|
||||
MAX_SESSION_TIME,
|
||||
ASSUME_ROLE_EXTERNAL_ID,
|
||||
ASSUME_ROLE_SSL_CONTEXT_SERVICE,
|
||||
ASSUME_ROLE_PROXY_HOST,
|
||||
ASSUME_ROLE_PROXY_PORT,
|
||||
ASSUME_ROLE_STS_REGION,
|
||||
ASSUME_ROLE_STS_ENDPOINT,
|
||||
ASSUME_ROLE_STS_SIGNER_OVERRIDE,
|
||||
ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME,
|
||||
ASSUME_ROLE_STS_CUSTOM_SIGNER_MODULE_LOCATION
|
||||
);
|
||||
|
||||
private volatile ConfigurationContext context;
|
||||
private volatile AWSCredentialsProvider credentialsProvider;
|
||||
protected final CredentialsProviderFactory credentialsProviderFactory = new CredentialsProviderFactory();
|
||||
|
||||
private final List<CredentialsStrategy> strategies = List.of(
|
||||
// Primary Credential Strategies
|
||||
new ExplicitDefaultCredentialsStrategy(),
|
||||
new AccessKeyPairCredentialsStrategy(),
|
||||
new FileCredentialsStrategy(),
|
||||
new NamedProfileCredentialsStrategy(),
|
||||
new AnonymousCredentialsStrategy(),
|
||||
|
||||
// Implicit Default is the catch-all primary strategy
|
||||
new ImplicitDefaultCredentialsStrategy(),
|
||||
|
||||
// Derived Credential Strategies
|
||||
new AssumeRoleCredentialsStrategy());
|
||||
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
|
@@ -120,22 +325,88 @@ public class AWSCredentialsProviderControllerService extends AbstractControllerS
@Override
public AwsCredentialsProvider getAwsCredentialsProvider() {
// Avoiding instantiation until actually used, in case v1-related configuration is not compatible with v2 clients
return credentialsProviderFactory.getAwsCredentialsProvider(context);
final CredentialsStrategy primaryStrategy = selectPrimaryStrategy(context);
final AwsCredentialsProvider primaryCredentialsProvider = primaryStrategy.getAwsCredentialsProvider(context);
AwsCredentialsProvider derivedCredentialsProvider = null;

for (final CredentialsStrategy strategy : strategies) {
if (strategy.canCreateDerivedCredential(context)) {
derivedCredentialsProvider = strategy.getDerivedAwsCredentialsProvider(context, primaryCredentialsProvider);
break;
}
}

return derivedCredentialsProvider == null ? primaryCredentialsProvider : derivedCredentialsProvider;
}

private CredentialsStrategy selectPrimaryStrategy(final PropertyContext propertyContext) {
for (final CredentialsStrategy strategy : strategies) {
if (strategy.canCreatePrimaryCredential(propertyContext)) {
return strategy;
}
}
return null;
}

@Override
protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
return credentialsProviderFactory.validate(validationContext);
final CredentialsStrategy selectedStrategy = selectPrimaryStrategy(validationContext);
final ArrayList<ValidationResult> validationFailureResults = new ArrayList<ValidationResult>();

for (CredentialsStrategy strategy : strategies) {
final Collection<ValidationResult> strategyValidationFailures = strategy.validate(validationContext,
selectedStrategy);
if (strategyValidationFailures != null) {
validationFailureResults.addAll(strategyValidationFailures);
}
}

return validationFailureResults;
}

@OnEnabled
public void onConfigured(final ConfigurationContext context) {
this.context = context;

credentialsProvider = credentialsProviderFactory.getCredentialsProvider(context);
credentialsProvider = createCredentialsProvider(context);
getLogger().debug("Using credentials provider: " + credentialsProvider.getClass());
}

private AWSCredentialsProvider createCredentialsProvider(final PropertyContext propertyContext) {
final CredentialsStrategy primaryStrategy = selectPrimaryStrategy(propertyContext);
AWSCredentialsProvider primaryCredentialsProvider = primaryStrategy.getCredentialsProvider(propertyContext);
AWSCredentialsProvider derivedCredentialsProvider = null;

for (CredentialsStrategy strategy : strategies) {
if (strategy.canCreateDerivedCredential(propertyContext)) {
derivedCredentialsProvider = strategy.getDerivedCredentialsProvider(propertyContext, primaryCredentialsProvider);
break;
}
}

if (derivedCredentialsProvider != null) {
return derivedCredentialsProvider;
} else {
return primaryCredentialsProvider;
}
}


public static AllowableValue[] getAvailableRegions() {
final List<AllowableValue> values = new ArrayList<>();
for (final Region region : Region.regions()) {
if (region.isGlobalRegion()) {
continue;
}
values.add(createAllowableValue(region));
}
return values.toArray(new AllowableValue[0]);
}

public static AllowableValue createAllowableValue(final Region region) {
return new AllowableValue(region.id(), region.metadata().description(), "AWS Region Code : " + region.id());
}

@Override
public String toString() {
return "AWSCredentialsProviderService[id=" + getIdentifier() + "]";

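The processor changes that follow remove their per-processor authentication properties in favour of this Controller Service; the migrateProperties support this commit adds is what carries existing flows across. A rough sketch of such a migration, assuming the migrateProperties(PropertyConfiguration) hook, a createControllerService(className, properties) method that returns the new service identifier, and setProperty/removeProperty helpers on the same interface; the quoted property names are placeholders, not the real descriptor names:

// Assumed imports: java.util.HashMap, java.util.Map, java.util.Optional,
// org.apache.nifi.migration.PropertyConfiguration (package assumed)
@Override
public void migrateProperties(final PropertyConfiguration config) {
    final Optional<String> accessKey = config.getRawPropertyValue("Access Key");   // placeholder name
    final Optional<String> secretKey = config.getRawPropertyValue("Secret Key");   // placeholder name

    if (accessKey.isPresent() || secretKey.isPresent()) {
        final Map<String, String> serviceProperties = new HashMap<>();
        accessKey.ifPresent(value -> serviceProperties.put("Access Key ID", value));       // placeholder service property name
        secretKey.ifPresent(value -> serviceProperties.put("Secret Access Key", value));   // placeholder service property name

        final String serviceId = config.createControllerService(
                "org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService",
                serviceProperties);
        config.setProperty("AWS Credentials Provider service", serviceId);                 // placeholder name
    }

    config.removeProperty("Access Key");
    config.removeProperty("Secret Key");
}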
@@ -40,8 +40,6 @@ import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -71,11 +69,20 @@ import java.util.Map;
})
public class DeleteDynamoDB extends AbstractDynamoDBProcessor {

public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
Arrays.asList(TABLE, HASH_KEY_NAME, RANGE_KEY_NAME, HASH_KEY_VALUE, RANGE_KEY_VALUE,
HASH_KEY_VALUE_TYPE, RANGE_KEY_VALUE_TYPE, BATCH_SIZE, REGION, ACCESS_KEY, SECRET_KEY,
CREDENTIALS_FILE, AWS_CREDENTIALS_PROVIDER_SERVICE, TIMEOUT, SSL_CONTEXT_SERVICE,
PROXY_CONFIGURATION_SERVICE, PROXY_HOST, PROXY_HOST_PORT, PROXY_USERNAME, PROXY_PASSWORD));
public static final List<PropertyDescriptor> properties = List.of(
TABLE,
REGION,
AWS_CREDENTIALS_PROVIDER_SERVICE,
HASH_KEY_NAME,
RANGE_KEY_NAME,
HASH_KEY_VALUE,
RANGE_KEY_VALUE,
HASH_KEY_VALUE_TYPE,
RANGE_KEY_VALUE_TYPE,
BATCH_SIZE,
TIMEOUT,
SSL_CONTEXT_SERVICE,
PROXY_CONFIGURATION_SERVICE);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {

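With the access key, secret key, credentials file and proxy properties gone from the list above, configuring the processor in a test now goes through the credentials Controller Service only. A small sketch using the AuthUtils helper added elsewhere in this commit, assuming it registers and enables an AWSCredentialsProviderControllerService for the runner; key values and table name are placeholders:

final TestRunner runner = TestRunners.newTestRunner(new DeleteDynamoDB());
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");   // placeholder credentials

runner.setProperty(DeleteDynamoDB.TABLE, "example-table");       // placeholder table name
runner.setProperty(DeleteDynamoDB.REGION, "us-west-2");
runner.setProperty(DeleteDynamoDB.HASH_KEY_NAME, "hashS");
runner.setProperty(DeleteDynamoDB.HASH_KEY_VALUE, "h1");

runner.enqueue(new byte[0]);
runner.run();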
@ -81,11 +81,21 @@ import java.util.stream.Collectors;
|
|||
})
|
||||
public class GetDynamoDB extends AbstractDynamoDBProcessor {
|
||||
|
||||
public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
|
||||
Arrays.asList(TABLE, HASH_KEY_NAME, RANGE_KEY_NAME, HASH_KEY_VALUE, RANGE_KEY_VALUE,
|
||||
HASH_KEY_VALUE_TYPE, RANGE_KEY_VALUE_TYPE, JSON_DOCUMENT, BATCH_SIZE, REGION, ACCESS_KEY, SECRET_KEY,
|
||||
CREDENTIALS_FILE, AWS_CREDENTIALS_PROVIDER_SERVICE, TIMEOUT, SSL_CONTEXT_SERVICE,
|
||||
PROXY_CONFIGURATION_SERVICE, PROXY_HOST, PROXY_HOST_PORT, PROXY_USERNAME, PROXY_PASSWORD));
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
TABLE,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
JSON_DOCUMENT,
|
||||
HASH_KEY_NAME,
|
||||
RANGE_KEY_NAME,
|
||||
HASH_KEY_VALUE,
|
||||
RANGE_KEY_VALUE,
|
||||
HASH_KEY_VALUE_TYPE,
|
||||
RANGE_KEY_VALUE_TYPE,
|
||||
BATCH_SIZE,
|
||||
TIMEOUT,
|
||||
SSL_CONTEXT_SERVICE,
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
public static final Relationship REL_NOT_FOUND = new Relationship.Builder().name("not found")
|
||||
.description("FlowFiles are routed to not found relationship if key not found in the table").build();
|
||||
|
|
|
@ -45,8 +45,6 @@ import org.apache.nifi.processor.ProcessContext;
|
|||
import org.apache.nifi.processor.ProcessSession;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -80,15 +78,23 @@ import java.util.Map;
|
|||
@SystemResourceConsideration(resource = SystemResource.MEMORY)
|
||||
public class PutDynamoDB extends AbstractDynamoDBProcessor {
|
||||
|
||||
public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(
|
||||
Arrays.asList(TABLE, HASH_KEY_NAME, RANGE_KEY_NAME, HASH_KEY_VALUE, RANGE_KEY_VALUE,
|
||||
HASH_KEY_VALUE_TYPE, RANGE_KEY_VALUE_TYPE, JSON_DOCUMENT, DOCUMENT_CHARSET, BATCH_SIZE,
|
||||
REGION, ACCESS_KEY, SECRET_KEY, CREDENTIALS_FILE, AWS_CREDENTIALS_PROVIDER_SERVICE, TIMEOUT, SSL_CONTEXT_SERVICE,
|
||||
PROXY_CONFIGURATION_SERVICE, PROXY_HOST, PROXY_HOST_PORT, PROXY_USERNAME, PROXY_PASSWORD));
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
TABLE,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
JSON_DOCUMENT,
|
||||
HASH_KEY_NAME,
|
||||
RANGE_KEY_NAME,
|
||||
HASH_KEY_VALUE,
|
||||
RANGE_KEY_VALUE,
|
||||
HASH_KEY_VALUE_TYPE,
|
||||
RANGE_KEY_VALUE_TYPE,
|
||||
DOCUMENT_CHARSET,
|
||||
BATCH_SIZE,
|
||||
TIMEOUT,
|
||||
SSL_CONTEXT_SERVICE,
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
/**
* DynamoDB max item size limit: 400 KB
*/
public static final int DYNAMODB_MAX_ITEM_SIZE = 400 * 1024;
|
||||
|
||||
@Override
|
||||
|
|
|
@ -50,7 +50,6 @@ import org.apache.nifi.serialization.SplitRecordSetHandlerException;
|
|||
import org.apache.nifi.serialization.record.Record;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
@ -89,10 +88,8 @@ import java.util.UUID;
|
|||
})
|
||||
public class PutDynamoDBRecord extends AbstractDynamoDBProcessor {
|
||||
|
||||
/**
|
||||
* Due to DynamoDB's hardcoded limitation on the number of items in one batch, the processor writes them in chunks.
|
||||
* Every chunk contains a number of items according to the limitations.
|
||||
*/
|
||||
// Due to DynamoDB's hardcoded limitation on the number of items in one batch, the processor writes them in chunks.
|
||||
// Every chunk contains a number of items according to the limitations.
|
||||
private static final int MAXIMUM_CHUNK_SIZE = 25;
|
||||
|
||||
static final String DYNAMODB_CHUNKS_PROCESSED_ATTRIBUTE = "dynamodb.chunks.processed";
|
||||
|
@ -171,19 +168,19 @@ public class PutDynamoDBRecord extends AbstractDynamoDBProcessor {
|
|||
.description("Defines the name of the sort key field in the DynamoDB table. Sort key is also known as range key.")
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = Arrays.asList(
|
||||
RECORD_READER,
|
||||
new PropertyDescriptor.Builder().fromPropertyDescriptor(AWS_CREDENTIALS_PROVIDER_SERVICE).required(true).build(),
|
||||
REGION,
|
||||
TABLE,
|
||||
PARTITION_KEY_STRATEGY,
|
||||
PARTITION_KEY_FIELD,
|
||||
PARTITION_KEY_ATTRIBUTE,
|
||||
SORT_KEY_STRATEGY,
|
||||
SORT_KEY_FIELD,
|
||||
TIMEOUT,
|
||||
ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE,
|
||||
SSL_CONTEXT_SERVICE
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
TABLE,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
RECORD_READER,
|
||||
PARTITION_KEY_STRATEGY,
|
||||
PARTITION_KEY_FIELD,
|
||||
PARTITION_KEY_ATTRIBUTE,
|
||||
SORT_KEY_STRATEGY,
|
||||
SORT_KEY_FIELD,
|
||||
TIMEOUT,
|
||||
ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE,
|
||||
SSL_CONTEXT_SERVICE
|
||||
);
|
||||
|
||||
@Override
|
||||
|
|
|
@ -109,7 +109,7 @@ final class RecordToItemConverter {
|
|||
} else if (value instanceof Map) {
|
||||
return getMapFieldAsMap(value);
|
||||
} else if (value instanceof Character || value instanceof Timestamp || value instanceof Date || value instanceof Time) {
|
||||
return ((Character) value).toString();
|
||||
return value.toString();
|
||||
} else if (value instanceof Enum) {
|
||||
return ((Enum) value).name();
|
||||
} else {
|
||||
|
|
|
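The branch above previously cast every matching value to Character, so a Timestamp, Date or Time record field would fail with a ClassCastException at runtime; plain value.toString() covers all four types. For illustration:

final Object value = java.sql.Timestamp.valueOf("2024-01-01 00:00:00");
// Old behaviour: ((Character) value).toString() throws ClassCastException for any non-Character value
final String converted = value.toString();   // "2024-01-01 00:00:00.0"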
@ -87,21 +87,15 @@ public class PutKinesisFirehose extends AbstractAwsSyncProcessor<FirehoseClient,
|
|||
.sensitive(false)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> properties = List.of(KINESIS_FIREHOSE_DELIVERY_STREAM_NAME,
|
||||
BATCH_SIZE,
|
||||
MAX_MESSAGE_BUFFER_SIZE_MB,
|
||||
REGION,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
TIMEOUT,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD,
|
||||
ENDPOINT_OVERRIDE);
|
||||
private static final List<PropertyDescriptor> properties = List.of(
|
||||
KINESIS_FIREHOSE_DELIVERY_STREAM_NAME,
|
||||
BATCH_SIZE,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
MAX_MESSAGE_BUFFER_SIZE_MB,
|
||||
TIMEOUT,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
ENDPOINT_OVERRIDE);
|
||||
|
||||
public static final int MAX_MESSAGE_SIZE = KinesisProcessorUtils.MAX_MESSAGE_SIZE;
|
||||
|
||||
|
|
|
@ -114,22 +114,15 @@ public class PutKinesisStream extends AbstractAwsSyncProcessor<KinesisClient, Ki
|
|||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
KINESIS_STREAM_NAME,
|
||||
KINESIS_PARTITION_KEY,
|
||||
BATCH_SIZE,
|
||||
MAX_MESSAGE_BUFFER_SIZE_MB,
|
||||
REGION,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
TIMEOUT,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD,
|
||||
ENDPOINT_OVERRIDE);
|
||||
KINESIS_STREAM_NAME,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
KINESIS_PARTITION_KEY,
|
||||
BATCH_SIZE,
|
||||
MAX_MESSAGE_BUFFER_SIZE_MB,
|
||||
TIMEOUT,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
ENDPOINT_OVERRIDE);
|
||||
|
||||
/** A random number generator for cases where partition key is not available */
|
||||
protected Random randomPartitionKeyGenerator = new Random();
|
||||
|
|
|
@ -102,16 +102,9 @@ public class PutLambda extends AbstractAwsSyncProcessor<LambdaClient, LambdaClie
|
|||
AWS_LAMBDA_FUNCTION_NAME,
|
||||
AWS_LAMBDA_FUNCTION_QUALIFIER,
|
||||
REGION,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
TIMEOUT,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD,
|
||||
ENDPOINT_OVERRIDE);
|
||||
|
||||
private static final Charset DEFAULT_CHARSET = Charset.defaultCharset();
|
||||
|
|
|
@ -35,8 +35,6 @@ import org.apache.nifi.processor.ProcessContext;
|
|||
import org.apache.nifi.processor.ProcessSession;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
@ -62,14 +60,11 @@ public class DeleteS3Object extends AbstractS3Processor {
|
|||
.required(false)
|
||||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(Arrays.asList(
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
BUCKET_WITH_DEFAULT_VALUE,
|
||||
KEY,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
S3_REGION,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
TIMEOUT,
|
||||
VERSION_ID,
|
||||
FULL_CONTROL_USER_LIST,
|
||||
|
@ -83,11 +78,7 @@ public class DeleteS3Object extends AbstractS3Processor {
|
|||
SIGNER_OVERRIDE,
|
||||
S3_CUSTOM_SIGNER_CLASS_NAME,
|
||||
S3_CUSTOM_SIGNER_MODULE_LOCATION,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD));
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
|
|
|
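The dedicated PROXY_HOST, PROXY_HOST_PORT, PROXY_USERNAME and PROXY_PASSWORD properties are dropped here as well; proxying is configured once on a Proxy Configuration Service and referenced from the processor. A rough sketch only: the StandardProxyConfigurationService implementation class and the way its proxy host and port are set are assumptions, so consult that service's own descriptors for the real names:

final ProxyConfigurationService proxyService = new StandardProxyConfigurationService();   // assumed implementation class
runner.addControllerService("proxy-config", proxyService);
// Set the service's own proxy type/host/port properties here; descriptor names vary by implementation
runner.enableControllerService(proxyService);

runner.setProperty(DeleteS3Object.PROXY_CONFIGURATION_SERVICE, "proxy-config");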
@ -56,9 +56,7 @@ import java.io.IOException;
|
|||
import java.net.URLDecoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -213,30 +211,23 @@ public class FetchS3Object extends AbstractS3Processor {
|
|||
.required(false)
|
||||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(Arrays.asList(
|
||||
BUCKET_WITH_DEFAULT_VALUE,
|
||||
KEY,
|
||||
S3_REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
TIMEOUT,
|
||||
VERSION_ID,
|
||||
SSL_CONTEXT_SERVICE,
|
||||
ENDPOINT_OVERRIDE,
|
||||
SIGNER_OVERRIDE,
|
||||
S3_CUSTOM_SIGNER_CLASS_NAME,
|
||||
S3_CUSTOM_SIGNER_MODULE_LOCATION,
|
||||
ENCRYPTION_SERVICE,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD,
|
||||
REQUESTER_PAYS,
|
||||
RANGE_START,
|
||||
RANGE_LENGTH));
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
BUCKET_WITH_DEFAULT_VALUE,
|
||||
KEY,
|
||||
S3_REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
TIMEOUT,
|
||||
VERSION_ID,
|
||||
SSL_CONTEXT_SERVICE,
|
||||
ENDPOINT_OVERRIDE,
|
||||
SIGNER_OVERRIDE,
|
||||
S3_CUSTOM_SIGNER_CLASS_NAME,
|
||||
S3_CUSTOM_SIGNER_MODULE_LOCATION,
|
||||
ENCRYPTION_SERVICE,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
REQUESTER_PAYS,
|
||||
RANGE_START,
|
||||
RANGE_LENGTH);
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
|
|
|
@ -87,7 +87,6 @@ import java.io.IOException;
|
|||
import java.io.OutputStream;
|
||||
import java.sql.Timestamp;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
|
@ -284,39 +283,32 @@ public class ListS3 extends AbstractS3Processor implements VerifiableProcessor {
|
|||
.build();
|
||||
|
||||
|
||||
public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(Arrays.asList(
|
||||
BUCKET_WITHOUT_DEFAULT_VALUE,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
LISTING_STRATEGY,
|
||||
TRACKING_STATE_CACHE,
|
||||
INITIAL_LISTING_TARGET,
|
||||
TRACKING_TIME_WINDOW,
|
||||
RECORD_WRITER,
|
||||
MIN_AGE,
|
||||
MAX_AGE,
|
||||
BATCH_SIZE,
|
||||
WRITE_OBJECT_TAGS,
|
||||
WRITE_USER_METADATA,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
TIMEOUT,
|
||||
SSL_CONTEXT_SERVICE,
|
||||
ENDPOINT_OVERRIDE,
|
||||
SIGNER_OVERRIDE,
|
||||
S3_CUSTOM_SIGNER_CLASS_NAME,
|
||||
S3_CUSTOM_SIGNER_MODULE_LOCATION,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD,
|
||||
DELIMITER,
|
||||
PREFIX,
|
||||
USE_VERSIONS,
|
||||
LIST_TYPE,
|
||||
REQUESTER_PAYS));
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
BUCKET_WITHOUT_DEFAULT_VALUE,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
LISTING_STRATEGY,
|
||||
TRACKING_STATE_CACHE,
|
||||
INITIAL_LISTING_TARGET,
|
||||
TRACKING_TIME_WINDOW,
|
||||
RECORD_WRITER,
|
||||
MIN_AGE,
|
||||
MAX_AGE,
|
||||
BATCH_SIZE,
|
||||
WRITE_OBJECT_TAGS,
|
||||
WRITE_USER_METADATA,
|
||||
TIMEOUT,
|
||||
SSL_CONTEXT_SERVICE,
|
||||
ENDPOINT_OVERRIDE,
|
||||
SIGNER_OVERRIDE,
|
||||
S3_CUSTOM_SIGNER_CLASS_NAME,
|
||||
S3_CUSTOM_SIGNER_MODULE_LOCATION,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
DELIMITER,
|
||||
PREFIX,
|
||||
USE_VERSIONS,
|
||||
LIST_TYPE,
|
||||
REQUESTER_PAYS);
|
||||
|
||||
public static final Set<Relationship> relationships = Collections.singleton(REL_SUCCESS);
|
||||
|
||||
|
|
|
@ -182,8 +182,7 @@ public class PutS3Object extends AbstractS3Processor {
|
|||
.name("Multipart Threshold")
|
||||
.description("Specifies the file size threshold for switch from the PutS3Object API to the " +
|
||||
"PutS3MultipartUpload API. Flow files bigger than this limit will be sent using the stateful " +
|
||||
"multipart process.\n" +
|
||||
"The valid range is 50MB to 5GB.")
|
||||
"multipart process. The valid range is 50MB to 5GB.")
|
||||
.required(true)
|
||||
.defaultValue("5 GB")
|
||||
.addValidator(StandardValidators.createDataSizeBoundsValidator(MIN_S3_PART_SIZE, MAX_S3_PUTOBJECT_SIZE))
|
||||
|
@ -191,10 +190,9 @@ public class PutS3Object extends AbstractS3Processor {
|
|||
|
||||
public static final PropertyDescriptor MULTIPART_PART_SIZE = new PropertyDescriptor.Builder()
|
||||
.name("Multipart Part Size")
|
||||
.description("Specifies the part size for use when the PutS3Multipart Upload API is used.\n" +
|
||||
.description("Specifies the part size for use when the PutS3Multipart Upload API is used. " +
|
||||
"Flow files will be broken into chunks of this size for the upload process, but the last part " +
|
||||
"sent can be smaller since it is not padded.\n" +
|
||||
"The valid range is 50MB to 5GB.")
|
||||
"sent can be smaller since it is not padded. The valid range is 50MB to 5GB.")
|
||||
.required(true)
|
||||
.defaultValue("5 GB")
|
||||
.addValidator(StandardValidators.createDataSizeBoundsValidator(MIN_S3_PART_SIZE, MAX_S3_PUTOBJECT_SIZE))
|
||||
|
@ -260,7 +258,7 @@ public class PutS3Object extends AbstractS3Processor {
|
|||
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
|
||||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(Arrays.asList(
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
BUCKET_WITH_DEFAULT_VALUE,
|
||||
KEY,
|
||||
S3_REGION,
|
||||
|
@ -271,9 +269,6 @@ public class PutS3Object extends AbstractS3Processor {
|
|||
CONTENT_TYPE,
|
||||
CONTENT_DISPOSITION,
|
||||
CACHE_CONTROL,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
OBJECT_TAGS_PREFIX,
|
||||
REMOVE_TAG_PREFIX,
|
||||
TIMEOUT,
|
||||
|
@ -297,11 +292,7 @@ public class PutS3Object extends AbstractS3Processor {
|
|||
MULTIPART_TEMP_DIR,
|
||||
USE_CHUNKED_ENCODING,
|
||||
USE_PATH_STYLE_ACCESS,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD));
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
final static String S3_BUCKET_KEY = "s3.bucket";
|
||||
final static String S3_OBJECT_KEY = "s3.key";
|
||||
|
@ -313,7 +304,6 @@ public class PutS3Object extends AbstractS3Processor {
|
|||
final static String S3_CACHE_CONTROL = "s3.cachecontrol";
|
||||
final static String S3_EXPIRATION_ATTR_KEY = "s3.expiration";
|
||||
final static String S3_STORAGECLASS_ATTR_KEY = "s3.storeClass";
|
||||
final static String S3_STORAGECLASS_META_KEY = "x-amz-storage-class";
|
||||
final static String S3_USERMETA_ATTR_KEY = "s3.usermetadata";
|
||||
final static String S3_API_METHOD_ATTR_KEY = "s3.apimethod";
|
||||
final static String S3_API_METHOD_PUTOBJECT = "putobject";
|
||||
|
@ -983,106 +973,106 @@ public class PutS3Object extends AbstractS3Processor {
|
|||
|
||||
private static final String SEPARATOR = "#";
|
||||
|
||||
private String _uploadId;
|
||||
private Long _filePosition;
|
||||
private List<PartETag> _partETags;
|
||||
private Long _partSize;
|
||||
private StorageClass _storageClass;
|
||||
private Long _contentLength;
|
||||
private Long _timestamp;
|
||||
private String uploadId;
|
||||
private Long filePosition;
|
||||
private List<PartETag> partETags;
|
||||
private Long partSize;
|
||||
private StorageClass storageClass;
|
||||
private Long contentLength;
|
||||
private Long timestamp;
|
||||
|
||||
public MultipartState() {
|
||||
_uploadId = "";
|
||||
_filePosition = 0L;
|
||||
_partETags = new ArrayList<>();
|
||||
_partSize = 0L;
|
||||
_storageClass = StorageClass.Standard;
|
||||
_contentLength = 0L;
|
||||
_timestamp = System.currentTimeMillis();
|
||||
uploadId = "";
|
||||
filePosition = 0L;
|
||||
partETags = new ArrayList<>();
|
||||
partSize = 0L;
|
||||
storageClass = StorageClass.Standard;
|
||||
contentLength = 0L;
|
||||
timestamp = System.currentTimeMillis();
|
||||
}
|
||||
|
||||
// create from a previous toString() result
|
||||
public MultipartState(String buf) {
|
||||
public MultipartState(final String buf) {
|
||||
String[] fields = buf.split(SEPARATOR);
|
||||
_uploadId = fields[0];
|
||||
_filePosition = Long.parseLong(fields[1]);
|
||||
_partETags = new ArrayList<>();
|
||||
uploadId = fields[0];
|
||||
filePosition = Long.parseLong(fields[1]);
|
||||
partETags = new ArrayList<>();
|
||||
for (String part : fields[2].split(",")) {
|
||||
if (part != null && !part.isEmpty()) {
|
||||
String[] partFields = part.split("/");
|
||||
_partETags.add(new PartETag(Integer.parseInt(partFields[0]), partFields[1]));
|
||||
partETags.add(new PartETag(Integer.parseInt(partFields[0]), partFields[1]));
|
||||
}
|
||||
}
|
||||
_partSize = Long.parseLong(fields[3]);
|
||||
_storageClass = StorageClass.fromValue(fields[4]);
|
||||
_contentLength = Long.parseLong(fields[5]);
|
||||
_timestamp = Long.parseLong(fields[6]);
|
||||
partSize = Long.parseLong(fields[3]);
|
||||
storageClass = StorageClass.fromValue(fields[4]);
|
||||
contentLength = Long.parseLong(fields[5]);
|
||||
timestamp = Long.parseLong(fields[6]);
|
||||
}
|
||||
|
||||
public String getUploadId() {
|
||||
return _uploadId;
|
||||
return uploadId;
|
||||
}
|
||||
|
||||
public void setUploadId(String id) {
|
||||
_uploadId = id;
|
||||
uploadId = id;
|
||||
}
|
||||
|
||||
public Long getFilePosition() {
|
||||
return _filePosition;
|
||||
return filePosition;
|
||||
}
|
||||
|
||||
public void setFilePosition(Long pos) {
|
||||
_filePosition = pos;
|
||||
filePosition = pos;
|
||||
}
|
||||
|
||||
public List<PartETag> getPartETags() {
|
||||
return _partETags;
|
||||
return partETags;
|
||||
}
|
||||
|
||||
public void addPartETag(PartETag tag) {
|
||||
_partETags.add(tag);
|
||||
partETags.add(tag);
|
||||
}
|
||||
|
||||
public Long getPartSize() {
|
||||
return _partSize;
|
||||
return partSize;
|
||||
}
|
||||
|
||||
public void setPartSize(Long size) {
|
||||
_partSize = size;
|
||||
partSize = size;
|
||||
}
|
||||
|
||||
public StorageClass getStorageClass() {
|
||||
return _storageClass;
|
||||
return storageClass;
|
||||
}
|
||||
|
||||
public void setStorageClass(StorageClass aClass) {
|
||||
_storageClass = aClass;
|
||||
storageClass = aClass;
|
||||
}
|
||||
|
||||
public Long getContentLength() {
|
||||
return _contentLength;
|
||||
return contentLength;
|
||||
}
|
||||
|
||||
public void setContentLength(Long length) {
|
||||
_contentLength = length;
|
||||
contentLength = length;
|
||||
}
|
||||
|
||||
public Long getTimestamp() {
|
||||
return _timestamp;
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
public void setTimestamp(Long timestamp) {
|
||||
_timestamp = timestamp;
|
||||
this.timestamp = timestamp;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder buf = new StringBuilder();
|
||||
buf.append(_uploadId).append(SEPARATOR)
|
||||
.append(_filePosition.toString()).append(SEPARATOR);
|
||||
if (_partETags.size() > 0) {
|
||||
buf.append(uploadId).append(SEPARATOR)
|
||||
.append(filePosition.toString()).append(SEPARATOR);
|
||||
if (partETags.size() > 0) {
|
||||
boolean first = true;
|
||||
for (PartETag tag : _partETags) {
|
||||
for (PartETag tag : partETags) {
|
||||
if (!first) {
|
||||
buf.append(",");
|
||||
} else {
|
||||
|
@ -1092,10 +1082,10 @@ public class PutS3Object extends AbstractS3Processor {
|
|||
}
|
||||
}
|
||||
buf.append(SEPARATOR)
|
||||
.append(_partSize.toString()).append(SEPARATOR)
|
||||
.append(_storageClass.toString()).append(SEPARATOR)
|
||||
.append(_contentLength.toString()).append(SEPARATOR)
|
||||
.append(_timestamp.toString());
|
||||
.append(partSize.toString()).append(SEPARATOR)
|
||||
.append(storageClass.toString()).append(SEPARATOR)
|
||||
.append(contentLength.toString()).append(SEPARATOR)
|
||||
.append(timestamp.toString());
|
||||
return buf.toString();
|
||||
}
|
||||
}
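MultipartState persists multipart upload progress as a single '#'-separated string so it can be written to and later recovered from the processor's state. A round-trip sketch, assuming the nested class and the AWS SDK v1 PartETag type are visible to the caller (for example from a test in the same package):

final PutS3Object.MultipartState state = new PutS3Object.MultipartState();
state.setUploadId("upload-1");
state.setFilePosition(1024L);
state.addPartETag(new PartETag(1, "etag-1"));
state.setPartSize(5L * 1024 * 1024);
state.setContentLength(10L * 1024 * 1024);

// Roughly: "upload-1#1024#1/etag-1#5242880#STANDARD#10485760#<timestamp>"
final String serialized = state.toString();

final PutS3Object.MultipartState restored = new PutS3Object.MultipartState(serialized);
assert "upload-1".equals(restored.getUploadId());
assert restored.getPartETags().size() == 1;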
|
||||
|
|
|
@ -40,8 +40,6 @@ import org.apache.nifi.processor.util.StandardValidators;
|
|||
import org.apache.nifi.util.StringUtils;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -105,7 +103,7 @@ public class TagS3Object extends AbstractS3Processor {
|
|||
.required(false)
|
||||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(Arrays.asList(
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
BUCKET_WITH_DEFAULT_VALUE,
|
||||
KEY,
|
||||
S3_REGION,
|
||||
|
@ -114,20 +112,13 @@ public class TagS3Object extends AbstractS3Processor {
|
|||
TAG_VALUE,
|
||||
APPEND_TAG,
|
||||
VERSION_ID,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
TIMEOUT,
|
||||
SSL_CONTEXT_SERVICE,
|
||||
ENDPOINT_OVERRIDE,
|
||||
SIGNER_OVERRIDE,
|
||||
S3_CUSTOM_SIGNER_CLASS_NAME,
|
||||
S3_CUSTOM_SIGNER_MODULE_LOCATION,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD));
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
|
@ -215,12 +206,12 @@ public class TagS3Object extends AbstractS3Processor {
|
|||
session.transfer(flowFile, REL_SUCCESS);
|
||||
final String url = s3.getResourceUrl(bucket, key);
|
||||
final long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
|
||||
getLogger().info("Successfully tagged S3 Object for {} in {} millis; routing to success", new Object[]{flowFile, transferMillis});
|
||||
getLogger().info("Successfully tagged S3 Object for {} in {} millis; routing to success", flowFile, transferMillis);
|
||||
session.getProvenanceReporter().invokeRemoteProcess(flowFile, url, "Object tagged");
|
||||
}
|
||||
|
||||
private void failFlowWithBlankEvaluatedProperty(ProcessSession session, FlowFile flowFile, PropertyDescriptor pd) {
|
||||
getLogger().error("{} value is blank after attribute expression language evaluation", new Object[]{pd.getName()});
|
||||
getLogger().error("{} value is blank after attribute expression language evaluation", pd.getName());
|
||||
flowFile = session.penalize(flowFile);
|
||||
session.transfer(flowFile, REL_FAILURE);
|
||||
}
|
||||
|
@ -229,7 +220,7 @@ public class TagS3Object extends AbstractS3Processor {
|
|||
flowFile = session.removeAllAttributes(flowFile, Pattern.compile("^s3\\.tag\\..*"));
|
||||
|
||||
final Map<String, String> tagAttrs = new HashMap<>();
|
||||
tags.stream().forEach(t -> tagAttrs.put("s3.tag." + t.getKey(), t.getValue()));
|
||||
tags.forEach(t -> tagAttrs.put("s3.tag." + t.getKey(), t.getValue()));
|
||||
flowFile = session.putAllAttributes(flowFile, tagAttrs);
|
||||
return flowFile;
|
||||
}
|
||||
|
|
|
@ -127,17 +127,10 @@ public class PutSNS extends AbstractAwsSyncProcessor<SnsClient, SnsClientBuilder
|
|||
ARN_TYPE,
|
||||
SUBJECT,
|
||||
REGION,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
TIMEOUT,
|
||||
USE_JSON_STRUCTURE,
|
||||
CHARACTER_ENCODING,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD,
|
||||
MESSAGEGROUPID,
|
||||
MESSAGEDEDUPLICATIONID);
|
||||
|
||||
|
|
|
@ -64,19 +64,13 @@ public class DeleteSQS extends AbstractAwsSyncProcessor<SqsClient, SqsClientBuil
|
|||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
QUEUE_URL,
|
||||
RECEIPT_HANDLE,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
REGION,
|
||||
TIMEOUT,
|
||||
ENDPOINT_OVERRIDE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD);
|
||||
QUEUE_URL,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
RECEIPT_HANDLE,
|
||||
TIMEOUT,
|
||||
ENDPOINT_OVERRIDE,
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
|
|
|
@ -118,23 +118,17 @@ public class GetSQS extends AbstractAwsSyncProcessor<SqsClient, SqsClientBuilder
|
|||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
QUEUE_URL,
|
||||
AUTO_DELETE,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
REGION,
|
||||
BATCH_SIZE,
|
||||
TIMEOUT,
|
||||
ENDPOINT_OVERRIDE,
|
||||
CHARSET,
|
||||
VISIBILITY_TIMEOUT,
|
||||
RECEIVE_MSG_WAIT_TIME,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD);
|
||||
QUEUE_URL,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
AUTO_DELETE,
|
||||
BATCH_SIZE,
|
||||
TIMEOUT,
|
||||
ENDPOINT_OVERRIDE,
|
||||
CHARSET,
|
||||
VISIBILITY_TIMEOUT,
|
||||
RECEIVE_MSG_WAIT_TIME,
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
|
|
|
@ -103,21 +103,15 @@ public class PutSQS extends AbstractAwsSyncProcessor<SqsClient, SqsClientBuilder
|
|||
.build();
|
||||
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
QUEUE_URL,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
REGION,
|
||||
DELAY,
|
||||
TIMEOUT,
|
||||
ENDPOINT_OVERRIDE,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD,
|
||||
MESSAGEGROUPID,
|
||||
MESSAGEDEDUPLICATIONID);
|
||||
QUEUE_URL,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
DELAY,
|
||||
TIMEOUT,
|
||||
ENDPOINT_OVERRIDE,
|
||||
PROXY_CONFIGURATION_SERVICE,
|
||||
MESSAGEGROUPID,
|
||||
MESSAGEDEDUPLICATIONID);
|
||||
|
||||
private volatile List<PropertyDescriptor> userDefinedProperties = Collections.emptyList();
|
||||
|
||||
|
@ -194,13 +188,13 @@ public class PutSQS extends AbstractAwsSyncProcessor<SqsClient, SqsClientBuilder
|
|||
throw new ProcessException(response.failed().get(0).toString());
|
||||
}
|
||||
} catch (final Exception e) {
|
||||
getLogger().error("Failed to send messages to Amazon SQS due to {}; routing to failure", new Object[]{e});
|
||||
getLogger().error("Failed to send messages to Amazon SQS; routing to failure", e);
|
||||
flowFile = session.penalize(flowFile);
|
||||
session.transfer(flowFile, REL_FAILURE);
|
||||
return;
|
||||
}
|
||||
|
||||
getLogger().info("Successfully published message to Amazon SQS for {}", new Object[]{flowFile});
|
||||
getLogger().info("Successfully published message to Amazon SQS for {}", flowFile);
|
||||
session.transfer(flowFile, REL_SUCCESS);
|
||||
final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
|
||||
session.getProvenanceReporter().send(flowFile, queueUrl, transmissionMillis);
|
||||
|
|
|
@ -44,6 +44,7 @@ import org.apache.nifi.processors.aws.wag.client.GenericApiGatewayResponse;
|
|||
import org.apache.nifi.stream.io.StreamUtils;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
@ -74,32 +75,24 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
|
||||
private static final Set<String> IDEMPOTENT_METHODS = new HashSet<>(Arrays.asList("GET", "HEAD", "OPTIONS"));
|
||||
|
||||
public static final List<PropertyDescriptor> properties = Collections.unmodifiableList(Arrays
|
||||
.asList(
|
||||
PROP_METHOD,
|
||||
REGION,
|
||||
ACCESS_KEY,
|
||||
SECRET_KEY,
|
||||
CREDENTIALS_FILE,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
TIMEOUT,
|
||||
PROP_RESOURCE_NAME,
|
||||
PROP_AWS_GATEWAY_API_ENDPOINT,
|
||||
PROP_AWS_API_KEY,
|
||||
PROP_ATTRIBUTES_TO_SEND,
|
||||
PROP_PUT_OUTPUT_IN_ATTRIBUTE,
|
||||
PROP_CONTENT_TYPE,
|
||||
PROP_SEND_BODY,
|
||||
PROP_OUTPUT_RESPONSE_REGARDLESS,
|
||||
PROP_PENALIZE_NO_RETRY,
|
||||
PROXY_HOST,
|
||||
PROXY_HOST_PORT,
|
||||
PROXY_USERNAME,
|
||||
PROXY_PASSWORD,
|
||||
PROP_QUERY_PARAMS,
|
||||
PROP_PUT_ATTRIBUTE_MAX_LENGTH,
|
||||
PROP_ADD_HEADERS_TO_REQUEST,
|
||||
PROXY_CONFIGURATION_SERVICE));
|
||||
public static final List<PropertyDescriptor> properties = List.of(
|
||||
PROP_METHOD,
|
||||
REGION,
|
||||
AWS_CREDENTIALS_PROVIDER_SERVICE,
|
||||
TIMEOUT,
|
||||
PROP_RESOURCE_NAME,
|
||||
PROP_AWS_GATEWAY_API_ENDPOINT,
|
||||
PROP_AWS_API_KEY,
|
||||
PROP_ATTRIBUTES_TO_SEND,
|
||||
PROP_PUT_OUTPUT_IN_ATTRIBUTE,
|
||||
PROP_CONTENT_TYPE,
|
||||
PROP_SEND_BODY,
|
||||
PROP_OUTPUT_RESPONSE_REGARDLESS,
|
||||
PROP_PENALIZE_NO_RETRY,
|
||||
PROP_QUERY_PARAMS,
|
||||
PROP_PUT_ATTRIBUTE_MAX_LENGTH,
|
||||
PROP_ADD_HEADERS_TO_REQUEST,
|
||||
PROXY_CONFIGURATION_SERVICE);
|
||||
|
||||
|
||||
public static final Relationship REL_SUCCESS_REQ = new Relationship.Builder()
|
||||
|
@ -133,8 +126,7 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
+ "exception. It will have new attributes detailing the request.")
|
||||
.build();
|
||||
|
||||
public static final Set<Relationship> RELATIONSHIPS = Collections.unmodifiableSet(new HashSet<>(
|
||||
Arrays.asList(REL_SUCCESS_REQ, REL_RESPONSE, REL_RETRY, REL_NO_RETRY, REL_FAILURE)));
|
||||
public static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS_REQ, REL_RESPONSE, REL_RETRY, REL_NO_RETRY, REL_FAILURE);
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
|
@ -162,8 +154,7 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
// Checking to see if the property to put the body of the response in an attribute was set
|
||||
boolean putToAttribute = context.getProperty(PROP_PUT_OUTPUT_IN_ATTRIBUTE).isSet();
|
||||
if (requestFlowFile == null) {
|
||||
String request = context.getProperty(PROP_METHOD).evaluateAttributeExpressions()
|
||||
.getValue().toUpperCase();
|
||||
final String request = context.getProperty(PROP_METHOD).evaluateAttributeExpressions().getValue().toUpperCase();
|
||||
if ("POST".equals(request) || "PUT".equals(request) || "PATCH".equals(request)) {
|
||||
return;
|
||||
} else if (putToAttribute) {
|
||||
|
@ -176,8 +167,7 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
FlowFile responseFlowFile = null;
|
||||
|
||||
try {
|
||||
final int maxAttributeSize = context.getProperty(PROP_PUT_ATTRIBUTE_MAX_LENGTH)
|
||||
.asInteger();
|
||||
final int maxAttributeSize = context.getProperty(PROP_PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
|
||||
|
||||
final String resourceName = context.getProperty(PROP_RESOURCE_NAME).getValue();
|
||||
|
||||
|
@ -192,8 +182,7 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
final int statusCode = gatewayResponse.statusCode;
|
||||
|
||||
final String endpoint = context.getProperty(PROP_AWS_GATEWAY_API_ENDPOINT).getValue();
|
||||
final boolean outputRegardless = context.getProperty(PROP_OUTPUT_RESPONSE_REGARDLESS)
|
||||
.asBoolean();
|
||||
final boolean outputRegardless = context.getProperty(PROP_OUTPUT_RESPONSE_REGARDLESS).asBoolean();
|
||||
|
||||
boolean outputBodyToResponseContent = (isSuccess(statusCode) && !putToAttribute || outputRegardless);
|
||||
boolean outputBodyToRequestAttribute = (!isSuccess(statusCode) || putToAttribute) && requestFlowFile != null;
|
||||
|
@ -241,11 +230,9 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
// write the response headers as attributes
|
||||
// this will overwrite any existing flowfile attributes
|
||||
if (response != null) {
|
||||
responseFlowFile = session
|
||||
.putAllAttributes(responseFlowFile, convertAttributesFromHeaders(response));
|
||||
responseFlowFile = session.putAllAttributes(responseFlowFile, convertAttributesFromHeaders(response));
|
||||
} else {
|
||||
responseFlowFile = session
|
||||
.putAllAttributes(responseFlowFile, exception.getHttpHeaders());
|
||||
responseFlowFile = session.putAllAttributes(responseFlowFile, exception.getHttpHeaders());
|
||||
}
|
||||
// transfer the message body to the payload
|
||||
// can potentially be null in edge cases
|
||||
|
@ -266,17 +253,11 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
}
|
||||
} else if (exception != null) {
|
||||
final String contentType = "application/json";
|
||||
responseFlowFile = session
|
||||
.putAttribute(responseFlowFile, CoreAttributes.MIME_TYPE.key(),
|
||||
contentType.trim());
|
||||
|
||||
responseFlowFile = session
|
||||
.importFrom(new ByteArrayInputStream(exception.getRawResponse()),
|
||||
responseFlowFile);
|
||||
responseFlowFile = session.putAttribute(responseFlowFile, CoreAttributes.MIME_TYPE.key(), contentType.trim());
|
||||
responseFlowFile = session.importFrom(new ByteArrayInputStream(exception.getRawResponse()), responseFlowFile);
|
||||
|
||||
// emit provenance event
|
||||
final long millis = TimeUnit.NANOSECONDS
|
||||
.toMillis(System.nanoTime() - startNanos);
|
||||
final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
|
||||
if (requestFlowFile != null) {
|
||||
session.getProvenanceReporter().fetch(responseFlowFile, endpoint, millis);
|
||||
} else {
|
||||
|
@ -286,9 +267,7 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
}
|
||||
// if not successful and request flowfile is not null, store the response body into a flowfile attribute
|
||||
if (outputBodyToRequestAttribute) {
|
||||
String attributeKey = context.getProperty(PROP_PUT_OUTPUT_IN_ATTRIBUTE)
|
||||
.evaluateAttributeExpressions(requestFlowFile)
|
||||
.getValue();
|
||||
String attributeKey = context.getProperty(PROP_PUT_OUTPUT_IN_ATTRIBUTE).evaluateAttributeExpressions(requestFlowFile).getValue();
|
||||
if (attributeKey == null) {
|
||||
attributeKey = RESPONSE_BODY;
|
||||
}
|
||||
|
@ -296,20 +275,14 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
int size = 0;
|
||||
outputBuffer = new byte[maxAttributeSize];
|
||||
if (bodyExists) {
|
||||
size = StreamUtils
|
||||
.fillBuffer(new ByteArrayInputStream(response.getBody().getBytes()),
|
||||
outputBuffer, false);
|
||||
} else if (exception != null && exception.getRawResponse() != null
|
||||
&& exception.getRawResponse().length > 0) {
|
||||
size = StreamUtils
|
||||
.fillBuffer(new ByteArrayInputStream(exception.getRawResponse()),
|
||||
outputBuffer, false);
|
||||
size = StreamUtils.fillBuffer(new ByteArrayInputStream(response.getBody().getBytes()), outputBuffer, false);
|
||||
} else if (exception != null && exception.getRawResponse() != null && exception.getRawResponse().length > 0) {
|
||||
size = StreamUtils.fillBuffer(new ByteArrayInputStream(exception.getRawResponse()), outputBuffer, false);
|
||||
}
|
||||
|
||||
if (size > 0) {
|
||||
String bodyString = new String(outputBuffer, 0, size, "UTF-8");
|
||||
requestFlowFile = session
|
||||
.putAttribute(requestFlowFile, attributeKey, bodyString);
|
||||
String bodyString = new String(outputBuffer, 0, size, StandardCharsets.UTF_8);
|
||||
requestFlowFile = session.putAttribute(requestFlowFile, attributeKey, bodyString);
|
||||
}
|
||||
|
||||
requestFlowFile = session.putAllAttributes(requestFlowFile, statusAttributes);
|
||||
|
@ -319,20 +292,16 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
.format("The %s has been added. The value of which is the body of a http call to %s%s. It took %s millis,", attributeKey, endpoint, resourceName, millis));
|
||||
}
|
||||
|
||||
route(requestFlowFile, responseFlowFile, session, context, statusCode,
|
||||
getRelationships());
|
||||
route(requestFlowFile, responseFlowFile, session, context, statusCode, getRelationships());
|
||||
} catch (final Exception e) {
|
||||
// penalize or yield
|
||||
if (requestFlowFile != null) {
|
||||
logger.error("Routing to {} due to exception: {}", REL_FAILURE.getName(), e, e);
|
||||
requestFlowFile = session.penalize(requestFlowFile);
|
||||
requestFlowFile = session
|
||||
.putAttribute(requestFlowFile, EXCEPTION_CLASS, e.getClass().getName());
|
||||
requestFlowFile = session
|
||||
.putAttribute(requestFlowFile, EXCEPTION_MESSAGE, e.getMessage());
|
||||
requestFlowFile = session.putAttribute(requestFlowFile, EXCEPTION_CLASS, e.getClass().getName());
|
||||
requestFlowFile = session.putAttribute(requestFlowFile, EXCEPTION_MESSAGE, e.getMessage());
|
||||
// transfer original to failure
|
||||
session.transfer(requestFlowFile,
|
||||
getRelationshipForName(REL_FAILURE_NAME, getRelationships()));
|
||||
session.transfer(requestFlowFile, getRelationshipForName(REL_FAILURE_NAME, getRelationships()));
|
||||
} else {
|
||||
logger.error("Yielding processor due to exception encountered as a source processor: {}", e);
|
||||
context.yield();
|
||||
|
@ -373,7 +342,7 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
method, endpoint, resource, statusExplanation, gatewayResponse.statusCode);
|
||||
} else {
|
||||
final String statusExplanation = gatewayResponse.response.getHttpResponse().getStatusText();
|
||||
explanation = String.format("Successfully invoked AWS Gateway API [%s %s%/s] with blank request body, receiving success response [%s] with status code [%s]",
|
||||
explanation = String.format("Successfully invoked AWS Gateway API [%s %s/%s] with blank request body, receiving success response [%s] with status code [%s]",
|
||||
method, endpoint, resource, statusExplanation, gatewayResponse.statusCode);
|
||||
}
|
||||
results.add(new ConfigVerificationResult.Builder()
|
||||
|
@ -425,15 +394,6 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
|
|||
return new GatewayResponse(response, exception, statusCode);
}

private class GatewayResponse {
private final GenericApiGatewayResponse response;
private final GenericApiGatewayException exception;
private final int statusCode;

private GatewayResponse(final GenericApiGatewayResponse response, final GenericApiGatewayException exception, final int statusCode) {
this.response = response;
this.exception = exception;
this.statusCode = statusCode;
}
private record GatewayResponse(GenericApiGatewayResponse response, GenericApiGatewayException exception, int statusCode) {
}
}

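Converting the private holder class to a Java record removes the hand-written fields and constructor; the compiler generates the canonical constructor, the response(), exception() and statusCode() accessors, and equals/hashCode/toString. Construction and access stay one-liners:

final GatewayResponse gatewayResponse = new GatewayResponse(response, exception, 200);
final int statusCode = gatewayResponse.statusCode();   // generated accessor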
@ -16,17 +16,15 @@
|
|||
*/
|
||||
package org.apache.nifi.processors.aws.cloudwatch;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.nifi.processors.aws.AbstractAWSCredentialsProviderProcessor;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
|
||||
import org.apache.nifi.processors.aws.sns.PutSNS;
|
||||
import org.apache.nifi.processors.aws.testutil.AuthUtils;
|
||||
import org.apache.nifi.util.TestRunner;
|
||||
import org.apache.nifi.util.TestRunners;
|
||||
import static org.junit.jupiter.api.Assumptions.assumeTrue;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
import static org.junit.jupiter.api.Assumptions.assumeTrue;
|
||||
|
||||
/**
|
||||
* Provides integration level testing with actual AWS CloudWatch resources for
|
||||
* {@link PutCloudWatchMetric} and requires additional configuration and resources to work.
|
||||
|
@ -36,15 +34,16 @@ public class ITPutCloudWatchMetric {
|
|||
private final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
|
||||
|
||||
@Test
|
||||
public void ifCredentialsThenTestPublish() throws IOException {
|
||||
public void ifCredentialsThenTestPublish() {
|
||||
final TestRunner runner = TestRunners.newTestRunner(new PutCloudWatchMetric());
|
||||
File credsFile = new File(CREDENTIALS_FILE);
|
||||
assumeTrue(credsFile.exists());
|
||||
|
||||
AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE);
|
||||
|
||||
runner.setProperty(PutCloudWatchMetric.NAMESPACE, "Test");
|
||||
runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "Test");
|
||||
runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
|
||||
runner.setProperty(PutCloudWatchMetric.CREDENTIALS_FILE, CREDENTIALS_FILE);
|
||||
|
||||
runner.enqueue(new byte[] {});
|
||||
runner.run();
|
||||
|
@ -53,23 +52,16 @@ public class ITPutCloudWatchMetric {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void ifCredentialsThenTestPublishWithCredentialsProviderService() throws Throwable {
|
||||
public void ifCredentialsThenTestPublishWithCredentialsProviderService() {
|
||||
final TestRunner runner = TestRunners.newTestRunner(new PutCloudWatchMetric());
|
||||
File credsFile = new File(CREDENTIALS_FILE);
|
||||
assumeTrue(credsFile.exists());
|
||||
|
||||
final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
|
||||
runner.addControllerService("awsCredentialsProvider", serviceImpl);
|
||||
|
||||
runner.setProperty(serviceImpl, AbstractAWSCredentialsProviderProcessor.CREDENTIALS_FILE, System.getProperty("user.home") + "/aws-credentials.properties");
|
||||
runner.enableControllerService(serviceImpl);
|
||||
|
||||
runner.assertValid(serviceImpl);
|
||||
AuthUtils.enableCredentialsFile(runner, credsFile.getAbsolutePath());
|
||||
|
||||
runner.setProperty(PutCloudWatchMetric.NAMESPACE, "Test");
|
||||
runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "Test");
|
||||
runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
|
||||
runner.setProperty(PutSNS.AWS_CREDENTIALS_PROVIDER_SERVICE, "awsCredentialsProvider");
|
||||
|
||||
runner.enqueue(new byte[] {});
|
||||
runner.run();
|
||||
|
|
|
@ -17,8 +17,10 @@
|
|||
package org.apache.nifi.processors.aws.cloudwatch;
|
||||
|
||||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.processors.aws.testutil.AuthUtils;
|
||||
import org.apache.nifi.util.TestRunner;
|
||||
import org.apache.nifi.util.TestRunners;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
|
@ -28,7 +30,6 @@ import software.amazon.awssdk.services.cloudwatch.model.Dimension;
|
|||
import software.amazon.awssdk.services.cloudwatch.model.MetricDatum;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
@ -37,18 +38,23 @@ import java.util.stream.Stream;
|
|||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link PutCloudWatchMetric}.
|
||||
*/
|
||||
public class TestPutCloudWatchMetric {
|
||||
|
||||
@Test
|
||||
public void testPutSimpleMetric() {
|
||||
MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
|
||||
final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);
|
||||
private TestRunner runner;
|
||||
private MockPutCloudWatchMetric mockPutCloudWatchMetric;
|
||||
|
||||
@BeforeEach
|
||||
public void setup() {
|
||||
mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
|
||||
runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);
|
||||
|
||||
runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
|
||||
runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
|
||||
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
|
||||
}
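AuthUtils is a new test utility referenced throughout these tests but not shown in this excerpt. A rough sketch of what enableAccessKey plausibly does, assuming it simply registers and enables an AWSCredentialsProviderControllerService and points the processor's credentials property at it; the quoted property names are assumptions:

public static void enableAccessKey(final TestRunner runner, final String accessKeyId, final String secretKey) {
    final AWSCredentialsProviderControllerService credentialsService = new AWSCredentialsProviderControllerService();
    try {
        runner.addControllerService("credentials-service", credentialsService);
    } catch (final InitializationException e) {
        throw new IllegalStateException(e);
    }
    runner.setProperty(credentialsService, "Access Key ID", accessKeyId);          // assumed property name
    runner.setProperty(credentialsService, "Secret Access Key", secretKey);        // assumed property name
    runner.enableControllerService(credentialsService);
    runner.setProperty("AWS Credentials Provider service", "credentials-service"); // assumed property name
}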
|
||||
|
||||
@Test
|
||||
public void testPutSimpleMetric() {
|
||||
runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
|
||||
runner.setProperty(PutCloudWatchMetric.UNIT, "Count");
|
||||
runner.setProperty(PutCloudWatchMetric.TIMESTAMP, "1476296132575");
|
||||
|
@ -67,32 +73,17 @@ public class TestPutCloudWatchMetric {
|
|||
|
||||
@Test
|
||||
public void testValueLiteralDoubleInvalid() {
|
||||
MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
|
||||
final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);
|
||||
|
||||
runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
|
||||
runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
|
||||
runner.setProperty(PutCloudWatchMetric.VALUE, "nan");
|
||||
runner.assertNotValid();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMissingBothValueAndStatisticSetInvalid() {
|
||||
MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
|
||||
final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);
|
||||
|
||||
runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
|
||||
runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
|
||||
runner.assertNotValid();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testContainsBothValueAndStatisticSetInvalid() {
|
||||
MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
|
||||
final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);
|
||||
|
||||
runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
|
||||
runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
|
||||
runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
|
||||
runner.setProperty(PutCloudWatchMetric.UNIT, "Count");
|
||||
runner.setProperty(PutCloudWatchMetric.TIMESTAMP, "1476296132575");
|
||||
|
@ -105,11 +96,6 @@ public class TestPutCloudWatchMetric {
|
|||
|
||||
@Test
|
||||
public void testContainsIncompleteStatisticSetInvalid() {
|
||||
MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
|
||||
final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);
|
||||
|
||||
runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
|
||||
runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
|
||||
runner.setProperty(PutCloudWatchMetric.UNIT, "Count");
|
||||
runner.setProperty(PutCloudWatchMetric.TIMESTAMP, "1476296132575");
|
||||
runner.setProperty(PutCloudWatchMetric.MINIMUM, "1.0");
|
||||
|
@ -121,11 +107,6 @@ public class TestPutCloudWatchMetric {
|
|||
|
||||
@Test
|
||||
public void testContainsBothValueAndIncompleteStatisticSetInvalid() {
|
||||
MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
|
||||
final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);
|
||||
|
||||
        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
        runner.setProperty(PutCloudWatchMetric.UNIT, "Count");
        runner.setProperty(PutCloudWatchMetric.TIMESTAMP, "1476296132575");

@ -135,11 +116,6 @@ public class TestPutCloudWatchMetric {

    @Test
    public void testMetricExpressionValid() {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.VALUE, "${metric.value}");
        runner.assertValid();

@ -158,11 +134,6 @@ public class TestPutCloudWatchMetric {

    @Test
    public void testStatisticSet() {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.MINIMUM, "${metric.min}");
        runner.setProperty(PutCloudWatchMetric.MAXIMUM, "${metric.max}");
        runner.setProperty(PutCloudWatchMetric.SUM, "${metric.sum}");

@ -190,11 +161,6 @@ public class TestPutCloudWatchMetric {

    @Test
    public void testDimensions() {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
        runner.setProperty(PutCloudWatchMetric.UNIT, "Count");
        runner.setProperty(PutCloudWatchMetric.TIMESTAMP, "1476296132575");

@ -215,7 +181,7 @@ public class TestPutCloudWatchMetric {
        assertEquals(1d, datum.value(), 0.0001d);

        List<Dimension> dimensions = new ArrayList<>(datum.dimensions());
        Collections.sort(dimensions, Comparator.comparing(Dimension::name));
        dimensions.sort(Comparator.comparing(Dimension::name));
        assertEquals(2, dimensions.size());
        assertEquals("dim1", dimensions.get(0).name());
        assertEquals("1", dimensions.get(0).value());

@ -225,11 +191,6 @@ public class TestPutCloudWatchMetric {

    @Test
    public void testMaximumDimensions() {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
        runner.setProperty(PutCloudWatchMetric.UNIT, "Count");
        runner.setProperty(PutCloudWatchMetric.TIMESTAMP, "1476296132575");

@ -241,11 +202,6 @@ public class TestPutCloudWatchMetric {

    @Test
    public void testTooManyDimensions() {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
        runner.setProperty(PutCloudWatchMetric.UNIT, "Count");
        runner.setProperty(PutCloudWatchMetric.TIMESTAMP, "1476296132575");

@ -257,11 +213,6 @@ public class TestPutCloudWatchMetric {

    @Test
    public void testMetricExpressionInvalidRoutesToFailure() {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.VALUE, "${metric.value}");
        runner.assertValid();

@ -277,11 +228,6 @@ public class TestPutCloudWatchMetric {
    @ParameterizedTest
    @CsvSource({"nan","percent","count"})
    public void testInvalidUnit(String unit) {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.UNIT, unit);
        runner.setProperty(PutCloudWatchMetric.VALUE, "1.0");
        runner.assertNotValid();

@ -294,11 +240,6 @@ public class TestPutCloudWatchMetric {
    @ParameterizedTest
    @MethodSource("data")
    public void testValidUnit(String unit) {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.UNIT, unit);
        runner.setProperty(PutCloudWatchMetric.VALUE, "1");
        runner.assertValid();

@ -306,11 +247,6 @@ public class TestPutCloudWatchMetric {

    @Test
    public void testTimestampExpressionInvalidRoutesToFailure() {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "TestNamespace");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "TestMetric");
        runner.setProperty(PutCloudWatchMetric.UNIT, "Count");
        runner.setProperty(PutCloudWatchMetric.VALUE, "1");
        runner.setProperty(PutCloudWatchMetric.TIMESTAMP, "${timestamp.value}");

@ -329,11 +265,6 @@ public class TestPutCloudWatchMetric {
    @ParameterizedTest
    @CsvSource({"null","us-west-100","us-east-a"})
    public void testInvalidRegion(String region) {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "Test");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "Test");
        runner.setProperty(PutCloudWatchMetric.VALUE, "6");
        runner.setProperty(PutCloudWatchMetric.REGION, region);
        runner.assertNotValid();

@ -342,11 +273,6 @@ public class TestPutCloudWatchMetric {
    @ParameterizedTest
    @CsvSource({"us-east-1","us-west-1","us-east-2"})
    public void testValidRegionRoutesToSuccess(String region) {
        MockPutCloudWatchMetric mockPutCloudWatchMetric = new MockPutCloudWatchMetric();
        final TestRunner runner = TestRunners.newTestRunner(mockPutCloudWatchMetric);

        runner.setProperty(PutCloudWatchMetric.NAMESPACE, "Test");
        runner.setProperty(PutCloudWatchMetric.METRIC_NAME, "Test");
        runner.setProperty(PutCloudWatchMetric.VALUE, "6");
        runner.setProperty(PutCloudWatchMetric.REGION, region);
        runner.assertValid();
@ -1,309 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.aws.credentials.provider.factory;

import com.amazonaws.SignableRequest;
import com.amazonaws.auth.AWS4Signer;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.auth.PropertiesFileCredentialsProvider;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.amazonaws.auth.Signer;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import org.apache.nifi.processors.aws.credentials.provider.PropertiesCredentialsProvider;
import org.apache.nifi.processors.aws.s3.FetchS3Object;
import org.apache.nifi.processors.aws.signer.AwsSignerType;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.Test;
import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.sts.auth.StsAssumeRoleCredentialsProvider;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Tests of the validation and credentials provider capabilities of CredentialsProviderFactory.
 */
public class TestCredentialsProviderFactory {

    @Test
    public void testImpliedDefaultCredentials() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.assertValid();

        final CredentialsProviderFactory factory = new CredentialsProviderFactory();
        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProvider);
        assertEquals(DefaultAWSCredentialsProviderChain.class,
                credentialsProvider.getClass(), "credentials provider should be equal");

        final AwsCredentialsProvider credentialsProviderV2 = factory.getAwsCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProviderV2);
        assertEquals(DefaultCredentialsProvider.class,
                credentialsProviderV2.getClass(), "credentials provider should be equal");
    }

    @Test
    public void testExplicitDefaultCredentials() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.USE_DEFAULT_CREDENTIALS, "true");
        runner.assertValid();

        final CredentialsProviderFactory factory = new CredentialsProviderFactory();
        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProvider);
        assertEquals(DefaultAWSCredentialsProviderChain.class,
                credentialsProvider.getClass(), "credentials provider should be equal");

        final AwsCredentialsProvider credentialsProviderV2 = factory.getAwsCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProviderV2);
        assertEquals(DefaultCredentialsProvider.class,
                credentialsProviderV2.getClass(), "credentials provider should be equal");
    }

    @Test
    public void testExplicitDefaultCredentialsExclusive() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.USE_DEFAULT_CREDENTIALS, "true");
        runner.setProperty(CredentialPropertyDescriptors.ACCESS_KEY_ID, "BogusAccessKey");
        runner.assertNotValid();
    }

    @Test
    public void testAccessKeyPairCredentials() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.USE_DEFAULT_CREDENTIALS, "false");
        runner.setProperty(CredentialPropertyDescriptors.ACCESS_KEY_ID, "BogusAccessKey");
        runner.setProperty(CredentialPropertyDescriptors.SECRET_KEY, "BogusSecretKey");
        runner.assertValid();


        final CredentialsProviderFactory factory = new CredentialsProviderFactory();
        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProvider);

        final AwsCredentialsProvider credentialsProviderV2 = factory.getAwsCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProviderV2);
        assertEquals(software.amazon.awssdk.auth.credentials.StaticCredentialsProvider.class,
                credentialsProviderV2.getClass(), "credentials provider should be equal");
    }

    @Test
    public void testAccessKeyPairIncomplete() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.ACCESS_KEY_ID, "BogusAccessKey");
        runner.assertNotValid();
    }

    @Test
    public void testAccessKeyPairIncompleteS3() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        runner.setProperty(CredentialPropertyDescriptors.ACCESS_KEY_ID, "BogusAccessKey");
        runner.assertNotValid();
    }

    @Test
    public void testFileCredentials() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.assertValid();

        final CredentialsProviderFactory factory = new CredentialsProviderFactory();
        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProvider);
        assertEquals(PropertiesFileCredentialsProvider.class,
                credentialsProvider.getClass(), "credentials provider should be equal");

        final AwsCredentialsProvider credentialsProviderV2 = factory.getAwsCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProviderV2);
        assertEquals(PropertiesCredentialsProvider.class,
                credentialsProviderV2.getClass(), "credentials provider should be equal");
    }

    @Test
    public void testAssumeRoleCredentials() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_ARN, "BogusArn");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_NAME, "BogusSession");
        runner.assertValid();

        final CredentialsProviderFactory factory = new CredentialsProviderFactory();
        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProvider);
        assertEquals(STSAssumeRoleSessionCredentialsProvider.class,
                credentialsProvider.getClass(), "credentials provider should be equal");
    }

    @Test
    public void testAssumeRoleCredentialsInvalidSessionTime() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_ARN, "BogusArn");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_NAME, "BogusSession");
        runner.setProperty(CredentialPropertyDescriptors.MAX_SESSION_TIME, "10");
        runner.assertNotValid();
    }

    @Test
    public void testAnonymousCredentials() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS, "true");
        runner.assertValid();

        final CredentialsProviderFactory factory = new CredentialsProviderFactory();
        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProvider);
        final AWSCredentials creds = credentialsProvider.getCredentials();
        assertEquals(AnonymousAWSCredentials.class, creds.getClass(), "credentials should be equal");

        final AwsCredentialsProvider credentialsProviderV2 = factory.getAwsCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProviderV2);
        assertEquals(AnonymousCredentialsProvider.class,
                credentialsProviderV2.getClass(), "credentials provider should be equal");
    }

    @Test
    public void testAnonymousAndDefaultCredentials() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.USE_DEFAULT_CREDENTIALS, "true");
        runner.setProperty(CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS, "true");
        runner.assertNotValid();
    }

    @Test
    public void testNamedProfileCredentials() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.USE_DEFAULT_CREDENTIALS, "false");
        runner.setProperty(CredentialPropertyDescriptors.PROFILE_NAME, "BogusProfile");
        runner.assertValid();

        final CredentialsProviderFactory factory = new CredentialsProviderFactory();
        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProvider);
        assertEquals(ProfileCredentialsProvider.class,
                credentialsProvider.getClass(), "credentials provider should be equal");

        final AwsCredentialsProvider credentialsProviderV2 = factory.getAwsCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProviderV2);
        assertEquals(software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider.class,
                credentialsProviderV2.getClass(), "credentials provider should be equal");
    }

    @Test
    public void testAssumeRoleCredentialsWithProxy() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_ARN, "BogusArn");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_NAME, "BogusSession");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_STS_REGION, Region.US_WEST_2.id());
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_HOST, "proxy.company.com");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_PORT, "8080");
        runner.assertValid();

        final CredentialsProviderFactory factory = new CredentialsProviderFactory();
        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProvider);
        assertEquals(STSAssumeRoleSessionCredentialsProvider.class,
                credentialsProvider.getClass(), "credentials provider should be equal");

        final AwsCredentialsProvider credentialsProviderV2 = factory.getAwsCredentialsProvider(runner.getProcessContext());
        assertNotNull(credentialsProviderV2);
        assertEquals(StsAssumeRoleCredentialsProvider.class,
                credentialsProviderV2.getClass(), "credentials provider should be equal");
    }

    @Test
    public void testAssumeRoleMissingProxyHost() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_ARN, "BogusArn");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_NAME, "BogusSession");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_PORT, "8080");
        runner.assertNotValid();
    }

    @Test
    public void testAssumeRoleMissingProxyPort() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_ARN, "BogusArn");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_NAME, "BogusSession");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_HOST, "proxy.company.com");
        runner.assertNotValid();
    }

    @Test
    public void testAssumeRoleInvalidProxyPort() throws Throwable {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_ARN, "BogusArn");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_NAME, "BogusSession");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_HOST, "proxy.company.com");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_PORT, "notIntPort");
        runner.assertNotValid();
    }

    @Test
    public void testAssumeRoleCredentialsWithCustomSigner() {
        final TestRunner runner = TestRunners.newTestRunner(MockAWSProcessor.class);
        runner.setProperty(CredentialPropertyDescriptors.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_ARN, "BogusArn");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_NAME, "BogusSession");
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_STS_SIGNER_OVERRIDE, AwsSignerType.CUSTOM_SIGNER.getValue());
        runner.setProperty(CredentialPropertyDescriptors.ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME, CustomSTSSigner.class.getName());
        runner.assertValid();

        final CredentialsProviderFactory factory = new CredentialsProviderFactory();

        final Signer signerChecker = mock(Signer.class);
        CustomSTSSigner.setSignerChecker(signerChecker);

        final AWSCredentialsProvider credentialsProvider = factory.getCredentialsProvider(runner.getProcessContext());

        try {
            credentialsProvider.getCredentials();
        } catch (Exception e) {
            // Expected to fail, we are only interested in the Signer
        }

        verify(signerChecker).sign(any(), any());
    }

    public static class CustomSTSSigner extends AWS4Signer {

        private static final ThreadLocal<Signer> SIGNER_CHECKER = new ThreadLocal<>();

        public static void setSignerChecker(Signer signerChecker) {
            SIGNER_CHECKER.set(signerChecker);
        }

        @Override
        public void sign(SignableRequest<?> request, AWSCredentials credentials) {
            SIGNER_CHECKER.get().sign(request, credentials);
        }
    }
}
@ -20,7 +20,6 @@ import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.auth.PropertiesFileCredentialsProvider;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.s3.FetchS3Object;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@ -29,6 +28,9 @@ import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
import software.amazon.awssdk.regions.Region;

import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ACCESS_KEY_ID;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.CREDENTIALS_FILE;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.SECRET_KEY;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

@ -57,8 +59,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");
        runner.enableControllerService(serviceImpl);

        runner.assertValid(serviceImpl);

@ -74,8 +76,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_REGION, Region.US_WEST_1.id());
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "Role");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "RoleName");

@ -96,8 +98,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_REGION, Region.US_WEST_1.id());
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "Role");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "RoleName");

@ -119,8 +121,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_REGION, Region.US_WEST_1.id());
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "Role");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "RoleName");

@ -135,8 +137,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_REGION, Region.US_WEST_1.id());
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "Role");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "RoleName");

@ -151,8 +153,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "Role");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "RoleName");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.MAX_SESSION_TIME, "899");

@ -164,8 +166,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "Role");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "RoleName");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.MAX_SESSION_TIME, "899");

@ -177,8 +179,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "Role");

        runner.assertNotValid(serviceImpl);

@ -189,8 +191,7 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.CREDENTIALS_FILE,
                "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(serviceImpl, CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_REGION, Region.US_WEST_1.id());
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "Role");
        runner.setProperty(serviceImpl, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "RoleName");

@ -211,7 +212,7 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.CREDENTIALS_FILE,
        runner.setProperty(serviceImpl, CREDENTIALS_FILE,
                "src/test/resources/mock-aws-credentials.properties");
        runner.enableControllerService(serviceImpl);

@ -230,7 +231,7 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.CREDENTIALS_FILE,
        runner.setProperty(serviceImpl, CREDENTIALS_FILE,
                "src/test/resources/bad-mock-aws-credentials.properties");

        runner.assertNotValid(serviceImpl);

@ -241,10 +242,10 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.CREDENTIALS_FILE,
        runner.setProperty(serviceImpl, CREDENTIALS_FILE,
                "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");

        runner.assertNotValid(serviceImpl);
    }

@ -254,9 +255,9 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.CREDENTIALS_FILE,
        runner.setProperty(serviceImpl, CREDENTIALS_FILE,
                "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");

        runner.assertNotValid(serviceImpl);
    }

@ -266,9 +267,9 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.CREDENTIALS_FILE,
        runner.setProperty(serviceImpl, CREDENTIALS_FILE,
                "src/test/resources/mock-aws-credentials.properties");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");

        runner.assertNotValid(serviceImpl);
    }

@ -278,7 +279,7 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "awsAccessKey");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "awsAccessKey");

        runner.assertNotValid(serviceImpl);
    }

@ -288,7 +289,7 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "awsSecretKey");
        runner.setProperty(serviceImpl, SECRET_KEY, "awsSecretKey");

        runner.assertNotValid(serviceImpl);
    }

@ -298,8 +299,8 @@ public class AWSCredentialsProviderControllerServiceTest {
        final TestRunner runner = TestRunners.newTestRunner(FetchS3Object.class);
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.ACCESS_KEY_ID, "${literal(\"awsAccessKey\")}");
        runner.setProperty(serviceImpl, CredentialPropertyDescriptors.SECRET_KEY, "${literal(\"awsSecretKey\")}");
        runner.setProperty(serviceImpl, ACCESS_KEY_ID, "${literal(\"awsAccessKey\")}");
        runner.setProperty(serviceImpl, SECRET_KEY, "${literal(\"awsSecretKey\")}");
        runner.enableControllerService(serviceImpl);

        runner.assertValid(serviceImpl);

@ -309,10 +310,10 @@ public class AWSCredentialsProviderControllerServiceTest {

        assertEquals(
                "awsAccessKey", service.getCredentialsProvider().getCredentials().getAWSAccessKeyId(),
                "Expression language should be supported for " + CredentialPropertyDescriptors.ACCESS_KEY_ID.getName());
                "Expression language should be supported for " + ACCESS_KEY_ID.getName());
        assertEquals(
                "awsSecretKey", service.getCredentialsProvider().getCredentials().getAWSSecretKey(),
                "Expression language should be supported for " + CredentialPropertyDescriptors.SECRET_KEY.getName());
                "Expression language should be supported for " + SECRET_KEY.getName());
    }

    @Test
@ -1,96 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.aws.credentials.provider.service;

import org.apache.nifi.processors.aws.AbstractAWSCredentialsProviderProcessor;
import org.apache.nifi.processors.aws.s3.FetchS3Object;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

public class AWSProcessorProxyTest {

    private TestRunner runner;

    @BeforeEach
    public void testSetup() {
        runner = TestRunners.newTestRunner(FetchS3Object.class);
        runner.setProperty(FetchS3Object.BUCKET_WITHOUT_DEFAULT_VALUE, "bucket");
        runner.assertValid();
    }

    @AfterEach
    public void testTearDown() {
        runner = null;
    }

    @Test
    public void testProxyHostOnlyInvalid() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_HOST, "proxyHost");
        runner.assertNotValid();
    }

    @Test
    public void testProxyHostPortOnlyInvalid() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_HOST_PORT, "1");
        runner.assertNotValid();
    }

    @Test
    public void testProxyHostPortNonNumberInvalid() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_HOST_PORT, "a");
        runner.assertNotValid();
    }

    @Test
    public void testProxyHostAndPortValid() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_HOST_PORT, "1");
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_HOST, "proxyHost");
        runner.assertValid();
    }

    @Test
    public void testProxyUserNoPasswordInValid() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_USERNAME, "foo");
        runner.assertNotValid();
    }

    @Test
    public void testProxyNoUserPasswordInValid() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_PASSWORD, "foo");
        runner.assertNotValid();
    }

    @Test
    public void testProxyUserPasswordNoHostInValid() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_USERNAME, "foo");
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_PASSWORD, "foo");
        runner.assertNotValid();
    }

    @Test
    public void testProxyUserPasswordHostValid() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_HOST_PORT, "1");
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_HOST, "proxyHost");
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_USERNAME, "foo");
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.PROXY_PASSWORD, "foo");
        runner.assertValid();
    }

}
@ -14,7 +14,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.aws.credentials.provider.factory;
package org.apache.nifi.processors.aws.credentials.provider.service;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;

@ -22,30 +22,27 @@ import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.regions.Region;
import com.amazonaws.services.s3.AmazonS3Client;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processors.aws.AbstractAWSCredentialsProviderProcessor;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_ARN;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_EXTERNAL_ID;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_NAME;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_HOST;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_PROXY_PORT;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_CUSTOM_SIGNER_MODULE_LOCATION;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_ENDPOINT;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_REGION;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.ASSUME_ROLE_STS_SIGNER_OVERRIDE;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.MAX_SESSION_TIME;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.PROFILE_NAME;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS;
import static org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors.USE_DEFAULT_CREDENTIALS;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_EXTERNAL_ID;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_PROXY_HOST;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_PROXY_PORT;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_CUSTOM_SIGNER_CLASS_NAME;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_CUSTOM_SIGNER_MODULE_LOCATION;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_ENDPOINT;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_REGION;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.ASSUME_ROLE_STS_SIGNER_OVERRIDE;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.MAX_SESSION_TIME;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.PROFILE_NAME;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.USE_ANONYMOUS_CREDENTIALS;
import static org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService.USE_DEFAULT_CREDENTIALS;


/**

@ -55,9 +52,6 @@ public class MockAWSProcessor extends AbstractAWSCredentialsProviderProcessor<Am

    public final List<PropertyDescriptor> properties = Arrays.asList(
            USE_DEFAULT_CREDENTIALS,
            ACCESS_KEY,
            SECRET_KEY,
            CREDENTIALS_FILE,
            PROFILE_NAME,
            USE_ANONYMOUS_CREDENTIALS,
            ASSUME_ROLE_ARN,

@ -83,13 +77,6 @@ public class MockAWSProcessor extends AbstractAWSCredentialsProviderProcessor<Am

    }

    @Override
    protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
        CredentialsProviderFactory credsFactory = new CredentialsProviderFactory();
        final Collection<ValidationResult> validationFailureResults = credsFactory.validate(validationContext);
        return validationFailureResults;
    }

    @Override
    protected AmazonS3Client createClient(final ProcessContext context, final AWSCredentialsProvider credentialsProvider, final Region region, final ClientConfiguration config,
                                          final AwsClientBuilder.EndpointConfiguration endpointConfiguration) {
@ -0,0 +1,195 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.nifi.processors.aws.credentials.provider.service;
|
||||
|
||||
import com.amazonaws.auth.AWSCredentials;
|
||||
import com.amazonaws.auth.AWSCredentialsProvider;
|
||||
import com.amazonaws.auth.AnonymousAWSCredentials;
|
||||
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
|
||||
import com.amazonaws.auth.PropertiesFileCredentialsProvider;
|
||||
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
|
||||
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
|
||||
import org.apache.nifi.processors.aws.credentials.provider.PropertiesCredentialsProvider;
|
||||
import org.apache.nifi.reporting.InitializationException;
|
||||
import org.apache.nifi.util.TestRunner;
|
||||
import org.apache.nifi.util.TestRunners;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
|
||||
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
|
||||
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
|
||||
public class TestAWSCredentialsProviderControllerServiceStrategies {
|
||||
|
||||
private TestRunner runner;
|
||||
private AWSCredentialsProviderControllerService service;
|
||||
|
||||
@BeforeEach
|
||||
public void setup() throws InitializationException {
|
||||
runner = TestRunners.newTestRunner(MockAWSProcessor.class);
|
||||
service = new AWSCredentialsProviderControllerService();
|
||||
runner.addControllerService("auth", service);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testImpliedDefaultCredentials() {
|
||||
runner.enableControllerService(service);
|
||||
|
||||
final AWSCredentialsProvider credentialsProvider = service.getCredentialsProvider();
|
||||
assertNotNull(credentialsProvider);
|
||||
assertEquals(DefaultAWSCredentialsProviderChain.class, credentialsProvider.getClass());
|
||||
|
||||
final AwsCredentialsProvider credentialsProviderV2 = service.getAwsCredentialsProvider();
|
||||
assertNotNull(credentialsProviderV2);
|
||||
assertEquals(DefaultCredentialsProvider.class, credentialsProviderV2.getClass());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExplicitDefaultCredentials() {
|
||||
runner.setProperty(service, AWSCredentialsProviderControllerService.USE_DEFAULT_CREDENTIALS, "true");
|
||||
runner.assertValid(service);
|
||||
runner.enableControllerService(service);
|
||||
|
||||
final AWSCredentialsProvider credentialsProvider = service.getCredentialsProvider();
|
||||
assertNotNull(credentialsProvider);
|
||||
assertEquals(DefaultAWSCredentialsProviderChain.class, credentialsProvider.getClass());
|
||||
|
||||
final AwsCredentialsProvider credentialsProviderV2 = service.getAwsCredentialsProvider();
|
||||
assertNotNull(credentialsProviderV2);
|
||||
assertEquals(DefaultCredentialsProvider.class, credentialsProviderV2.getClass());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExplicitDefaultCredentialsExclusive() {
|
||||
runner.setProperty(service, AWSCredentialsProviderControllerService.USE_DEFAULT_CREDENTIALS, "true");
|
||||
runner.setProperty(service, AWSCredentialsProviderControllerService.ACCESS_KEY_ID, "BogusAccessKey");
|
||||
runner.assertNotValid(service);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAssumeRoleCredentials() throws Throwable {
|
||||
runner.setProperty(service, AWSCredentialsProviderControllerService.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
|
||||
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "BogusArn");
|
||||
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "BogusSession");
|
||||
runner.enableControllerService(service);
|
||||
|
||||
final AWSCredentialsProvider credentialsProvider = service.getCredentialsProvider();
assertNotNull(credentialsProvider);
assertEquals(STSAssumeRoleSessionCredentialsProvider.class, credentialsProvider.getClass());
}

@Test
public void testFileCredentials() {
runner.setProperty(service, AWSCredentialsProviderControllerService.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
runner.enableControllerService(service);

final AWSCredentialsProvider credentialsProvider = service.getCredentialsProvider();
assertNotNull(credentialsProvider);
assertEquals(PropertiesFileCredentialsProvider.class, credentialsProvider.getClass());

final AwsCredentialsProvider credentialsProviderV2 = service.getAwsCredentialsProvider();
assertNotNull(credentialsProviderV2);
assertEquals(PropertiesCredentialsProvider.class, credentialsProviderV2.getClass());
}

@Test
public void testAccessKeyPairIncomplete() {
runner.setProperty(service, AWSCredentialsProviderControllerService.ACCESS_KEY_ID, "BogusAccessKey");
runner.assertNotValid(service);
}

@Test
public void testAssumeRoleCredentialsInvalidSessionTime() {
runner.setProperty(service, AWSCredentialsProviderControllerService.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "BogusArn");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "BogusSession");
runner.setProperty(service, AWSCredentialsProviderControllerService.MAX_SESSION_TIME, "10");
runner.assertNotValid(service);
}

@Test
public void testAnonymousCredentials() {
runner.setProperty(service, AWSCredentialsProviderControllerService.USE_ANONYMOUS_CREDENTIALS, "true");
runner.assertValid(service);
runner.enableControllerService(service);

final AWSCredentialsProvider credentialsProvider = service.getCredentialsProvider();
assertNotNull(credentialsProvider);
final AWSCredentials creds = credentialsProvider.getCredentials();
assertEquals(AnonymousAWSCredentials.class, creds.getClass());

final AwsCredentialsProvider credentialsProviderV2 = service.getAwsCredentialsProvider();
assertNotNull(credentialsProviderV2);
assertEquals(AnonymousCredentialsProvider.class, credentialsProviderV2.getClass());
}

@Test
public void testAnonymousAndDefaultCredentials() {
runner.setProperty(service, AWSCredentialsProviderControllerService.USE_DEFAULT_CREDENTIALS, "true");
runner.setProperty(service, AWSCredentialsProviderControllerService.USE_ANONYMOUS_CREDENTIALS, "true");
runner.assertNotValid(service);
}

@Test
public void testNamedProfileCredentials() {
runner.setProperty(service, AWSCredentialsProviderControllerService.USE_DEFAULT_CREDENTIALS, "false");
runner.setProperty(service, AWSCredentialsProviderControllerService.PROFILE_NAME, "BogusProfile");
runner.enableControllerService(service);

final AWSCredentialsProvider credentialsProvider = service.getCredentialsProvider();
assertNotNull(credentialsProvider);
assertEquals(ProfileCredentialsProvider.class, credentialsProvider.getClass());

final AwsCredentialsProvider credentialsProviderV2 = service.getAwsCredentialsProvider();
assertNotNull(credentialsProviderV2);
assertEquals(software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider.class, credentialsProviderV2.getClass());
}

@Test
public void testAssumeRoleMissingProxyHost() {
runner.setProperty(service, AWSCredentialsProviderControllerService.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "BogusArn");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "BogusSession");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_PROXY_PORT, "8080");
runner.assertNotValid(service);
}

@Test
public void testAssumeRoleMissingProxyPort() {
runner.setProperty(service, AWSCredentialsProviderControllerService.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "BogusArn");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "BogusSession");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_PROXY_HOST, "proxy.company.com");
runner.assertNotValid(service);
}

@Test
public void testAssumeRoleInvalidProxyPort() {
runner.setProperty(service, AWSCredentialsProviderControllerService.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_ARN, "BogusArn");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_NAME, "BogusSession");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_PROXY_HOST, "proxy.company.com");
runner.setProperty(service, AWSCredentialsProviderControllerService.ASSUME_ROLE_PROXY_PORT, "notIntPort");
runner.assertNotValid(service);
}
}

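The controller-service assertions above run against a `runner`/`service` pair that the test class is assumed to register in a setup method outside this hunk. A minimal sketch of that assumed fixture, using only standard TestRunner APIs; the host processor (FetchS3Object) and the service identifier are illustrative assumptions, not part of the commit:

import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.s3.FetchS3Object;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;

public class AWSCredentialsProviderControllerServiceFixtureSketch {

    private TestRunner runner;
    private AWSCredentialsProviderControllerService service;

    @BeforeEach
    public void setUp() throws InitializationException {
        // Any processor can host the runner; FetchS3Object is used here purely for illustration.
        runner = TestRunners.newTestRunner(FetchS3Object.class);
        service = new AWSCredentialsProviderControllerService();
        runner.addControllerService("Credentials Service", service);
    }
}
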
@ -27,6 +27,8 @@ import com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult;
import com.amazonaws.services.dynamodbv2.model.DeleteRequest;
import com.amazonaws.services.dynamodbv2.model.WriteRequest;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@ -69,8 +71,24 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

private TestRunner createRunner() throws InitializationException {
return createRunner(deleteDynamoDB);
}

private TestRunner createRunner(final DeleteDynamoDB processor) throws InitializationException {
final TestRunner deleteRunner = TestRunners.newTestRunner(processor);
AuthUtils.enableAccessKey(deleteRunner, "abcd", "cdef");

deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");

return deleteRunner;
}

@Test
public void testStringHashStringRangeDeleteOnlyHashFailure() {
public void testStringHashStringRangeDeleteOnlyHashFailure() throws InitializationException {
// Inject a mock DynamoDB to create the exception condition
final DynamoDB mockDynamoDb = Mockito.mock(DynamoDB.class);
// When writing, mock thrown service exception from AWS

@ -83,14 +101,8 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}
};

final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);
final TestRunner deleteRunner = createRunner();

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
deleteRunner.enqueue(new byte[] {});

deleteRunner.run(1);

@ -105,15 +117,8 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangeDeleteSuccessfulWithMock() {
final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
public void testStringHashStringRangeDeleteSuccessfulWithMock() throws InitializationException {
final TestRunner deleteRunner = createRunner();
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
deleteRunner.enqueue(new byte[] {});

@ -125,7 +130,7 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangeDeleteSuccessfulWithMockOneUnprocessed() {
public void testStringHashStringRangeDeleteSuccessfulWithMockOneUnprocessed() throws InitializationException {
Map<String, List<WriteRequest>> unprocessed =
new HashMap<String, List<WriteRequest>>();
DeleteRequest delete = new DeleteRequest();

@ -136,14 +141,7 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
writes.add(write);
unprocessed.put(stringHashStringRangeTableName, writes);
result.setUnprocessedItems(unprocessed);
final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
final TestRunner deleteRunner = createRunner();
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
deleteRunner.enqueue(new byte[] {});

@ -155,16 +153,13 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangeDeleteNoHashValueFailure() {
final TestRunner deleteRunner = TestRunners.newTestRunner(DeleteDynamoDB.class);
public void testStringHashStringRangeDeleteNoHashValueFailure() throws InitializationException {
final TestRunner deleteRunner = createRunner(new DeleteDynamoDB());

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
deleteRunner.removeProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE);
deleteRunner.enqueue(new byte[] {});

deleteRunner.run(1);

@ -179,15 +174,9 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangeDeleteOnlyHashWithRangeValueNoRangeNameFailure() {
final TestRunner deleteRunner = TestRunners.newTestRunner(DeleteDynamoDB.class);
public void testStringHashStringRangeDeleteOnlyHashWithRangeValueNoRangeNameFailure() throws InitializationException {
final TestRunner deleteRunner = createRunner(new DeleteDynamoDB());

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
deleteRunner.enqueue(new byte[] {});

@ -203,16 +192,9 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangeDeleteOnlyHashWithRangeNameNoRangeValueFailure() {
final TestRunner deleteRunner = TestRunners.newTestRunner(DeleteDynamoDB.class);

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
public void testStringHashStringRangeDeleteOnlyHashWithRangeNameNoRangeValueFailure() throws InitializationException {
final TestRunner deleteRunner = createRunner(new DeleteDynamoDB());
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
deleteRunner.enqueue(new byte[] {});

deleteRunner.run(1);

@ -226,47 +208,33 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangeDeleteNonExistentHashSuccess() {
final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);
public void testStringHashStringRangeDeleteNonExistentHashSuccess() throws InitializationException {
final TestRunner deleteRunner = createRunner();

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "nonexistent");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
deleteRunner.enqueue(new byte[] {});

deleteRunner.run(1);

deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_SUCCESS, 1);

}

@Test
public void testStringHashStringRangeDeleteNonExistentRangeSuccess() {
final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
public void testStringHashStringRangeDeleteNonExistentRangeSuccess() throws InitializationException {
final TestRunner deleteRunner = createRunner();
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "nonexistent");
deleteRunner.enqueue(new byte[] {});

deleteRunner.run(1);

deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_SUCCESS, 1);

}

@Test
public void testStringHashStringRangeDeleteThrowsServiceException() {
public void testStringHashStringRangeDeleteThrowsServiceException() throws InitializationException {
final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
@Override
public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {

@ -280,14 +248,8 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
return mockDynamoDB;
}
};
final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
final TestRunner deleteRunner = createRunner();
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");

@ -305,7 +267,7 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangeDeleteThrowsClientException() {
public void testStringHashStringRangeDeleteThrowsClientException() throws InitializationException {
final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
@Override
public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {

@ -319,14 +281,8 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
return mockDynamoDB;
}
};
final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
final TestRunner deleteRunner = createRunner(deleteDynamoDB);
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");

@ -343,7 +299,7 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangeDeleteThrowsRuntimeException() {
public void testStringHashStringRangeDeleteThrowsRuntimeException() throws InitializationException {
final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
@Override
public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {

@ -357,14 +313,7 @@ public class DeleteDynamoDBTest extends AbstractDynamoDBTest {
return mockDynamoDB;
}
};
final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
final TestRunner deleteRunner = createRunner(deleteDynamoDB);
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");

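The createRunner helpers above rely on AuthUtils.enableAccessKey(...), which replaces the per-processor ACCESS_KEY/SECRET_KEY properties removed by this commit; the helper itself is not shown in these hunks. A plausible sketch of what such a helper does, modeled on the controller-service wiring visible in the Kinesis test changes further below; the SECRET_KEY descriptor and the "AWS Credentials Provider service" property name are assumptions for illustration:

import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunner;

public final class AuthUtilsSketch {

    private AuthUtilsSketch() {
    }

    public static void enableAccessKey(final TestRunner runner, final String accessKeyId, final String secretKey) throws InitializationException {
        // Register and configure a credentials Controller Service instead of per-processor properties.
        final AWSCredentialsProviderControllerService credentialsService = new AWSCredentialsProviderControllerService();
        runner.addControllerService("credentials-service", credentialsService);
        runner.setProperty(credentialsService, AWSCredentialsProviderControllerService.ACCESS_KEY_ID, accessKeyId);
        // SECRET_KEY is assumed to be the matching descriptor on the service.
        runner.setProperty(credentialsService, AWSCredentialsProviderControllerService.SECRET_KEY, secretKey);
        runner.enableControllerService(credentialsService);
        // Point the processor under test at the service; the property name string is an assumption.
        runner.setProperty("AWS Credentials Provider service", "credentials-service");
    }
}
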
@ -29,6 +29,7 @@ import com.amazonaws.services.dynamodbv2.model.KeysAndAttributes;
import org.apache.nifi.components.ConfigVerificationResult;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.VerifiableProcessor;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@ -87,18 +88,8 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {

@Test
public void testStringHashStringRangeGetUnprocessed() {
final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
getRunner.enqueue(new byte[] {});
final TestRunner getRunner = createRunner();
getRunner.enqueue(new byte[]{});

getRunner.run(1);

@ -107,11 +98,28 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {

getRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_UNPROCESSED, 1);

List<MockFlowFile> flowFiles = getRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_UNPROCESSED);
final List<MockFlowFile> flowFiles = getRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_UNPROCESSED);
for (MockFlowFile flowFile : flowFiles) {
assertNotNull(flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_KEY_ERROR_UNPROCESSED));
flowFile.assertAttributeExists(AbstractDynamoDBProcessor.DYNAMODB_KEY_ERROR_UNPROCESSED);
}
}

private TestRunner createRunner() {
return createRunner(getDynamoDB);
}

private TestRunner createRunner(final GetDynamoDB dynamoDB) {
final TestRunner getRunner = TestRunners.newTestRunner(dynamoDB);
AuthUtils.enableAccessKey(getRunner, "abcd", "defg");

getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
return getRunner;
}

@Test

@ -145,17 +153,7 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {
};

getDynamoDB = mockDynamoDB(mockDynamoDB);
final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
final TestRunner getRunner = createRunner(getDynamoDB);
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -203,17 +201,7 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {
};

getDynamoDB = mockDynamoDB(mockDynamoDB);
final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
final TestRunner getRunner = createRunner(getDynamoDB);
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -234,17 +222,7 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {

final GetDynamoDB getDynamoDB = mockDynamoDB(mockDynamoDB);

final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
final TestRunner getRunner = createRunner(getDynamoDB);
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -272,17 +250,7 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {

final GetDynamoDB getDynamoDB = mockDynamoDB(mockDynamoDB);

final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
final TestRunner getRunner = createRunner(getDynamoDB);
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -294,7 +262,6 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {
for (MockFlowFile flowFile : flowFiles) {
assertEquals("runtimeException", flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_ERROR_EXCEPTION_MESSAGE));
}

}

@Test

@ -310,17 +277,7 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {

final GetDynamoDB getDynamoDB = mockDynamoDB(mockDynamoDB);

final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
final TestRunner getRunner = createRunner(getDynamoDB);
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -354,17 +311,7 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {
};

final GetDynamoDB getDynamoDB = mockDynamoDB(notFoundMockDynamoDB);
final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
final TestRunner getRunner = createRunner(getDynamoDB);
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -389,15 +336,7 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {

getDynamoDB = mockDynamoDB(mockDynamoDb);

final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
final TestRunner getRunner = createRunner(getDynamoDB);
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -415,16 +354,8 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {

@Test
public void testStringHashStringRangeGetNoHashValueFailure() {
final TestRunner getRunner = TestRunners.newTestRunner(GetDynamoDB.class);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
final TestRunner getRunner = createRunner();
getRunner.removeProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE);
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -440,16 +371,9 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {

@Test
public void testStringHashStringRangeGetOnlyHashWithRangeValueNoRangeNameFailure() {
final TestRunner getRunner = TestRunners.newTestRunner(GetDynamoDB.class);
final TestRunner getRunner = createRunner();
getRunner.removeProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -460,21 +384,13 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {
for (MockFlowFile flowFile : flowFiles) {
assertNotNull(flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_RANGE_KEY_VALUE_ERROR));
}

}

@Test
public void testStringHashStringRangeGetOnlyHashWithRangeNameNoRangeValueFailure() {
final TestRunner getRunner = TestRunners.newTestRunner(GetDynamoDB.class);
final TestRunner getRunner = createRunner();
getRunner.removeProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -487,7 +403,6 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {
}
}

// Incorporated test from James W
@Test
public void testStringHashStringNoRangeGetUnprocessed() {
unprocessed.clear();

@ -497,15 +412,10 @@ public class GetDynamoDBTest extends AbstractDynamoDBTest {
kaa.withKeys(map);
unprocessed.put(stringHashStringRangeTableName, kaa);

final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);
final TestRunner getRunner = createRunner();
getRunner.removeProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME);
getRunner.removeProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE);

getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
getRunner.enqueue(new byte[] {});

getRunner.run(1);

@ -27,6 +27,8 @@ import com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult;
import com.amazonaws.services.dynamodbv2.model.PutRequest;
import com.amazonaws.services.dynamodbv2.model.WriteRequest;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@ -35,6 +37,7 @@ import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;

@ -45,7 +48,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

public class PutDynamoDBTest extends AbstractDynamoDBTest {

private static final byte[] HELLO_2_BYTES = "{\"hell\": 2}".getBytes(StandardCharsets.UTF_8);
protected PutDynamoDB putDynamoDB;
protected BatchWriteItemResult result = new BatchWriteItemResult();
BatchWriteItemOutcome outcome;

@ -69,8 +72,24 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
};
}

private TestRunner createRunner() throws InitializationException {
return createRunner(putDynamoDB);
}

private TestRunner createRunner(final PutDynamoDB processor) {
final TestRunner putRunner = TestRunners.newTestRunner(processor);
AuthUtils.enableAccessKey(putRunner, "abcd", "cdef");

putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
return putRunner;
}

@Test
public void testStringHashStringRangePutOnlyHashFailure() {
public void testStringHashStringRangePutOnlyHashFailure() throws InitializationException {
// Inject a mock DynamoDB to create the exception condition
final DynamoDB mockDynamoDb = Mockito.mock(DynamoDB.class);
// When writing, mock thrown service exception from AWS

@ -83,17 +102,8 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
}
};

final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
String document = "{\"hello\": 2}";
putRunner.enqueue(document.getBytes());
final TestRunner putRunner = createRunner();
putRunner.enqueue(HELLO_2_BYTES);

putRunner.run(1);

@ -108,18 +118,11 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {

@Test
public void testStringHashStringRangePutNoHashValueFailure() {
final TestRunner putRunner = TestRunners.newTestRunner(PutDynamoDB.class);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
final TestRunner putRunner = createRunner(new PutDynamoDB());
putRunner.removeProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE);
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
String document = "{\"hello\": 2}";
putRunner.enqueue(document.getBytes());
putRunner.enqueue(HELLO_2_BYTES);

putRunner.run(1);

@ -129,21 +132,12 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
for (MockFlowFile flowFile : flowFiles) {
assertNotNull(flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_HASH_KEY_VALUE_ERROR));
}

}

@Test
public void testStringHashStringRangePutOnlyHashWithRangeValueNoRangeNameFailure() {
final TestRunner putRunner = TestRunners.newTestRunner(PutDynamoDB.class);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
public void testStringHashStringRangePutOnlyHashWithRangeValueNoRangeNameFailure() throws InitializationException {
final TestRunner putRunner = createRunner(new PutDynamoDB());
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
putRunner.enqueue(new byte[] {});

putRunner.run(1);

@ -154,18 +148,12 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
for (MockFlowFile flowFile : flowFiles) {
assertNotNull(flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_RANGE_KEY_VALUE_ERROR));
}

}

@Test
public void testStringHashStringRangePutOnlyHashWithRangeNameNoRangeValueFailure() {
final TestRunner putRunner = TestRunners.newTestRunner(PutDynamoDB.class);
public void testStringHashStringRangePutOnlyHashWithRangeNameNoRangeValueFailure() throws InitializationException {
final TestRunner putRunner = createRunner(new PutDynamoDB());

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");

@ -182,18 +170,10 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangePutSuccessfulWithMock() {
final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
public void testStringHashStringRangePutSuccessfulWithMock() throws InitializationException {
final TestRunner putRunner = createRunner();
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
String document = "{\"name\":\"john\"}";
putRunner.enqueue(document.getBytes());

@ -206,22 +186,13 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
System.out.println(flowFile.getAttributes());
assertEquals(document, new String(flowFile.toByteArray()));
}

}

@Test
public void testStringHashStringRangePutOneSuccessfulOneSizeFailureWithMockBatchSize1() {
final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
public void testStringHashStringRangePutOneSuccessfulOneSizeFailureWithMockBatchSize1() throws InitializationException {
final TestRunner putRunner = createRunner();
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
String document = "{\"name\":\"john\"}";
putRunner.enqueue(document.getBytes());

@ -247,18 +218,11 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangePutOneSuccessfulOneSizeFailureWithMockBatchSize5() {
final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);
public void testStringHashStringRangePutOneSuccessfulOneSizeFailureWithMockBatchSize5() throws InitializationException {
final TestRunner putRunner = createRunner();
putRunner.setProperty(AbstractDynamoDBProcessor.BATCH_SIZE, "5");
putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
String document = "{\"name\":\"john\"}";
putRunner.enqueue(document.getBytes());

@ -284,18 +248,10 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangePutFailedWithItemSizeGreaterThan400Kb() {
final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
public void testStringHashStringRangePutFailedWithItemSizeGreaterThan400Kb() throws InitializationException {
final TestRunner putRunner = createRunner();
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
byte [] item = new byte[PutDynamoDB.DYNAMODB_MAX_ITEM_SIZE + 1];
Arrays.fill(item, (byte) 'a');
String document = new String(item);

@ -315,7 +271,7 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangePutThrowsServiceException() {
public void testStringHashStringRangePutThrowsServiceException() throws InitializationException {
final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
@Override
public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {

@ -329,17 +285,10 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
return mockDynamoDB;
}
};
final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
final TestRunner putRunner = createRunner();
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
String document = "{\"name\":\"john\"}";
putRunner.enqueue(document.getBytes());

@ -355,7 +304,7 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangePutThrowsClientException() {
public void testStringHashStringRangePutThrowsClientException() throws InitializationException {
final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
@Override
public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {

@ -369,17 +318,10 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
return mockDynamoDB;
}
};
final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
final TestRunner putRunner = createRunner();
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
String document = "{\"name\":\"john\"}";
putRunner.enqueue(document.getBytes());

@ -394,7 +336,7 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangePutThrowsRuntimeException() {
public void testStringHashStringRangePutThrowsRuntimeException() throws InitializationException {
final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
@Override
public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {

@ -408,17 +350,10 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
return mockDynamoDB;
}
};
final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);
final TestRunner putRunner = createRunner();

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "document");
String document = "{\"name\":\"john\"}";
putRunner.enqueue(document.getBytes());

@ -433,7 +368,7 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
}

@Test
public void testStringHashStringRangePutSuccessfulWithMockOneUnprocessed() {
public void testStringHashStringRangePutSuccessfulWithMockOneUnprocessed() throws InitializationException {
final Map<String, List<WriteRequest>> unprocessed = new HashMap<>();
final PutRequest put = new PutRequest();
put.addItemEntry("hashS", new AttributeValue("h1"));

@ -443,14 +378,8 @@ public class PutDynamoDBTest extends AbstractDynamoDBTest {
writes.add(write);
unprocessed.put(stringHashStringRangeTableName, writes);
result.setUnprocessedItems(unprocessed);
final TestRunner putRunner = TestRunners.newTestRunner(putDynamoDB);

putRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY,"abcd");
putRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
putRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
putRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
putRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
final TestRunner putRunner = createRunner();
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
putRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
putRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j2");

@ -17,6 +17,7 @@
package org.apache.nifi.processors.aws.kinesis.firehose;

import org.apache.nifi.processors.aws.s3.FetchS3Object;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@ -48,7 +49,7 @@ public class ITPutKinesisFirehose {
@BeforeEach
public void setUp() throws Exception {
runner = TestRunners.newTestRunner(PutKinesisFirehose.class);
runner.setProperty(PutKinesisFirehose.CREDENTIALS_FILE, CREDENTIALS_FILE);
AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE);
runner.setProperty(PutKinesisFirehose.KINESIS_FIREHOSE_DELIVERY_STREAM_NAME, "testkinesis");
}

@ -1,81 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.aws.kinesis.firehose;

import org.apache.nifi.processors.aws.s3.FetchS3Object;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.List;

import static com.amazonaws.SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPERTY;

// This integration test can be run against a mock Kenesis Firehose such as
// https://github.com/localstack/localstack
public class ITPutKinesisFirehoseWithEndpointOverride {

private TestRunner runner;

@BeforeEach
public void setUp() throws Exception {
runner = TestRunners.newTestRunner(PutKinesisFirehose.class);
runner.setProperty(PutKinesisFirehose.ACCESS_KEY, "access key");
runner.setProperty(PutKinesisFirehose.SECRET_KEY, "secret key");
runner.setProperty(PutKinesisFirehose.KINESIS_FIREHOSE_DELIVERY_STREAM_NAME, "test");
runner.setProperty(PutKinesisFirehose.ENDPOINT_OVERRIDE, "http://localhost:4573");
runner.assertValid();
}

@AfterEach
public void tearDown() throws Exception {
runner = null;

System.clearProperty(AWS_CBOR_DISABLE_SYSTEM_PROPERTY);
}

@Test
public void testIntegrationSuccess() throws Exception {
runner.assertValid();

runner.enqueue("test".getBytes());
runner.run(1);

runner.assertAllFlowFilesTransferred(PutKinesisFirehose.REL_SUCCESS, 1);

final List<MockFlowFile> ffs = runner.getFlowFilesForRelationship(FetchS3Object.REL_SUCCESS);
final MockFlowFile out = ffs.iterator().next();

out.assertContentEquals("test".getBytes());
}

@Test
public void testIntegrationFailedBadStreamName() throws Exception {
runner.setProperty(PutKinesisFirehose.KINESIS_FIREHOSE_DELIVERY_STREAM_NAME, "notfound");
runner.assertValid();

runner.enqueue("test".getBytes());
runner.run(1);

runner.assertAllFlowFilesTransferred(PutKinesisFirehose.REL_FAILURE, 1);

}

}

@@ -16,6 +16,7 @@
*/
package org.apache.nifi.processors.aws.kinesis.firehose;

import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -34,6 +35,7 @@ public class TestPutKinesisFirehose {
public void setUp() throws Exception {
runner = TestRunners.newTestRunner(PutKinesisFirehose.class);
runner.setProperty(PutKinesisFirehose.KINESIS_FIREHOSE_DELIVERY_STREAM_NAME, "deliveryName");
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
runner.assertValid();
}
@@ -17,10 +17,10 @@
package org.apache.nifi.processors.aws.kinesis.stream;

import org.apache.nifi.processors.aws.credentials.provider.PropertiesCredentialsProvider;
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeEach;
import software.amazon.awssdk.http.apache.ApacheHttpClient;
import software.amazon.awssdk.regions.Region;

@@ -32,11 +32,12 @@ import java.io.File;

public class ITConsumeKinesisStreamConnectAWS extends ITConsumeKinesisStream {

private final static File CREDENTIALS_FILE =
new File(System.getProperty("user.home") + "/aws-credentials.properties");
private final static File CREDENTIALS_FILE = new File(System.getProperty("user.home") + "/aws-credentials.properties");

@BeforeEach
public void setUp() throws InterruptedException, InitializationException {
Assumptions.assumeTrue(CREDENTIALS_FILE.exists());

System.setProperty("aws.cborEnabled", "false");

kinesis = KinesisClient.builder()

@@ -56,10 +57,7 @@ public class ITConsumeKinesisStreamConnectAWS extends ITConsumeKinesisStream {
waitForKinesisToInitialize();

runner = TestRunners.newTestRunner(ConsumeKinesisStream.class);
final AWSCredentialsProviderControllerService credentialsService = new AWSCredentialsProviderControllerService();
runner.addControllerService("credentials-service", credentialsService);
runner.setProperty(credentialsService, CredentialPropertyDescriptors.CREDENTIALS_FILE, CREDENTIALS_FILE.getAbsolutePath());
runner.enableControllerService(credentialsService);
AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE.getAbsolutePath());
runner.setProperty(ConsumeKinesisStream.APPLICATION_NAME, APPLICATION_NAME);
runner.setProperty(ConsumeKinesisStream.KINESIS_STREAM_NAME, KINESIS_STREAM_NAME);
runner.setProperty(ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE, "credentials-service");
@@ -16,12 +16,12 @@
*/
package org.apache.nifi.processors.aws.kinesis.stream;

import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeEach;
import software.amazon.awssdk.auth.credentials.AwsCredentials;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.http.apache.ApacheHttpClient;

@@ -35,8 +35,7 @@ import java.net.URI;

public class ITConsumeKinesisStreamEndpointOverride extends ITConsumeKinesisStream {

private final static File CREDENTIALS_FILE =
new File(System.getProperty("user.home") + "/aws-credentials.properties");
private final static File CREDENTIALS_FILE = new File(System.getProperty("user.home") + "/aws-credentials.properties");

private static final String ACCESS_KEY = "test";
private static final String SECRET_KEY = "test";

@@ -45,21 +44,12 @@ public class ITConsumeKinesisStreamEndpointOverride extends ITConsumeKinesisStre
private static final String LOCAL_STACK_KINESIS_ENDPOINT_OVERRIDE = "http://localhost:4566";
private static final String LOCAL_STACK_DYNAMODB_ENDPOINT_OVERRIDE = "http://localhost:4566";

private final AwsCredentialsProvider awsCredentialsProvider =
StaticCredentialsProvider.create(new AwsCredentials() {
@Override
public String accessKeyId() {
return ACCESS_KEY;
}

@Override
public String secretAccessKey() {
return SECRET_KEY;
}
});
private final AwsCredentialsProvider awsCredentialsProvider = StaticCredentialsProvider.create(
AwsBasicCredentials.create(ACCESS_KEY, SECRET_KEY));

@BeforeEach
public void setUp() throws InterruptedException, InitializationException {
Assumptions.assumeTrue(CREDENTIALS_FILE.exists());
System.setProperty("aws.cborEnabled", "false");

kinesis = KinesisClient.builder()

@@ -81,12 +71,8 @@ public class ITConsumeKinesisStreamEndpointOverride extends ITConsumeKinesisStre
waitForKinesisToInitialize();

runner = TestRunners.newTestRunner(ConsumeKinesisStream.class);
final AWSCredentialsProviderControllerService credentialsService = new AWSCredentialsProviderControllerService();
runner.addControllerService("credentials-service", credentialsService);
runner.setProperty(credentialsService, CredentialPropertyDescriptors.CREDENTIALS_FILE, CREDENTIALS_FILE.getAbsolutePath());
runner.enableControllerService(credentialsService);
AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE.getAbsolutePath());

runner.setProperty(ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE, "credentials-service");
runner.setProperty(ConsumeKinesisStream.APPLICATION_NAME, APPLICATION_NAME);
runner.setProperty(ConsumeKinesisStream.KINESIS_STREAM_NAME, KINESIS_STREAM_NAME);
runner.setProperty(ConsumeKinesisStream.REGION, REGION);
@@ -17,13 +17,15 @@
package org.apache.nifi.processors.aws.kinesis.stream;

import org.apache.nifi.processors.aws.kinesis.KinesisProcessorUtils;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -43,26 +45,18 @@ public class ITPutKinesisStream {
protected final static String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";

@BeforeEach
public void setUp() throws Exception {
public void setUp() {
Assumptions.assumeTrue(new File(CREDENTIALS_FILE).exists());

runner = TestRunners.newTestRunner(PutKinesisStream.class);
runner.setProperty(PutKinesisStream.KINESIS_STREAM_NAME, "kstream");
runner.setProperty(PutKinesisStream.CREDENTIALS_FILE, CREDENTIALS_FILE);
AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE);
runner.assertValid();
}

@AfterEach
public void tearDown() throws Exception {
runner = null;
}

/**
* Comment out ignore for integration tests (requires creds files)
*/
@Test
public void testIntegrationSuccess() throws Exception {
runner.setProperty(PutKinesisStream.CREDENTIALS_FILE, CREDENTIALS_FILE);
runner.assertValid();

runner.enqueue("test".getBytes());
runner.run(1);

@@ -107,9 +101,6 @@ public class ITPutKinesisStream {
out.assertContentEquals("test".getBytes());
}

/**
* Comment out ignore for integration tests (requires creds files)
*/
@Test
public void testIntegrationFailedBadStreamName() throws Exception {
runner.setProperty(PutKinesisStream.KINESIS_STREAM_NAME, "bad-kstream");
@@ -16,11 +16,13 @@
*/
package org.apache.nifi.processors.aws.kinesis.stream;

import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import java.util.List;

@@ -29,24 +31,24 @@ import static com.amazonaws.SDKGlobalConfiguration.AWS_CBOR_DISABLE_SYSTEM_PROPE

// This integration test can be run against a mock Kenesis such as
// https://github.com/mhart/kinesalite or https://github.com/localstack/localstack
@Disabled("Required external service be running. Needs to be updated to make use of Localstack TestContainer")
public class ITPutKinesisStreamWithEndpointOverride {

private TestRunner runner;

@BeforeEach
public void setUp() throws Exception {
public void setUp() {
System.setProperty(AWS_CBOR_DISABLE_SYSTEM_PROPERTY, "true");

runner = TestRunners.newTestRunner(PutKinesisStream.class);
runner.setProperty(PutKinesisStream.KINESIS_STREAM_NAME, "test");
runner.setProperty(PutKinesisStream.ACCESS_KEY, "access key");
runner.setProperty(PutKinesisStream.SECRET_KEY, "secret key");
runner.setProperty(PutKinesisStream.ENDPOINT_OVERRIDE, "http://localhost:4568");
AuthUtils.enableAccessKey(runner, "accessKey", "secretKey");
runner.assertValid();
}

@AfterEach
public void tearDown() throws Exception {
public void tearDown() {
runner = null;

System.clearProperty(AWS_CBOR_DISABLE_SYSTEM_PROPERTY);
@@ -23,7 +23,6 @@ import org.apache.nifi.json.JsonTreeReader;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderService;
import org.apache.nifi.reporting.InitializationException;

@@ -62,7 +61,7 @@ public class TestConsumeKinesisStream {
// use anonymous credentials by default
final ControllerService credentialsProvider = new AWSCredentialsProviderControllerService();
runner.addControllerService("credentials-provider", credentialsProvider);
runner.setProperty(credentialsProvider, CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS, "true");
runner.setProperty(credentialsProvider, AWSCredentialsProviderControllerService.USE_ANONYMOUS_CREDENTIALS, "true");
runner.assertValid(credentialsProvider);
runner.enableControllerService(credentialsProvider);
runner.setProperty(ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE, "credentials-provider");

@@ -70,19 +69,6 @@ public class TestConsumeKinesisStream {
runner.assertValid();
}

@Test
public void testValidWithCredentials() throws InitializationException {
final ControllerService credentialsProvider = new AWSCredentialsProviderControllerService();
runner.addControllerService("credentials-provider", credentialsProvider);
runner.setProperty(credentialsProvider, CredentialPropertyDescriptors.ACCESS_KEY_ID, "access-key");
runner.setProperty(credentialsProvider, CredentialPropertyDescriptors.SECRET_KEY, "secret-key");
runner.assertValid(credentialsProvider);
runner.enableControllerService(credentialsProvider);
runner.setProperty(ConsumeKinesisStream.AWS_CREDENTIALS_PROVIDER_SERVICE, "credentials-provider");
runner.assertValid();

((ConsumeKinesisStream) runner.getProcessor()).onScheduled(runner.getProcessContext());
}

@Test
public void testMissingMandatoryProperties() {

@@ -346,10 +332,10 @@ public class TestConsumeKinesisStream {
final AWSCredentialsProviderService awsCredentialsProviderService = new AWSCredentialsProviderControllerService();
mockConsumeKinesisStreamRunner.addControllerService("aws-credentials", awsCredentialsProviderService);
if (withCredentials) {
mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, CredentialPropertyDescriptors.ACCESS_KEY_ID, "test-access");
mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, CredentialPropertyDescriptors.SECRET_KEY, "test-secret");
mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, AWSCredentialsProviderControllerService.ACCESS_KEY_ID, "test-access");
mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, AWSCredentialsProviderControllerService.SECRET_KEY, "test-secret");
} else {
mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, CredentialPropertyDescriptors.USE_ANONYMOUS_CREDENTIALS, "true");
mockConsumeKinesisStreamRunner.setProperty(awsCredentialsProviderService, AWSCredentialsProviderControllerService.USE_ANONYMOUS_CREDENTIALS, "true");
}
mockConsumeKinesisStreamRunner.assertValid(awsCredentialsProviderService);
mockConsumeKinesisStreamRunner.enableControllerService(awsCredentialsProviderService);
@@ -17,6 +17,7 @@
package org.apache.nifi.processors.aws.kinesis.stream;

import org.apache.nifi.processors.aws.kinesis.KinesisProcessorUtils;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -35,9 +36,8 @@ public class TestPutKinesisStream {
@BeforeEach
public void setUp() throws Exception {
runner = TestRunners.newTestRunner(PutKinesisStream.class);
runner.setProperty(PutKinesisStream.ACCESS_KEY, "abcd");
runner.setProperty(PutKinesisStream.SECRET_KEY, "secret key");
runner.setProperty(PutKinesisStream.KINESIS_STREAM_NAME, "kstream");
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
runner.assertValid();
}
@@ -16,13 +16,16 @@
*/
package org.apache.nifi.processors.aws.lambda;

import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;

@@ -39,31 +42,29 @@ public class ITPutLambda {
protected final static String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";

@BeforeEach
public void setUp() throws Exception {
public void setUp() {
Assumptions.assumeTrue(new File(CREDENTIALS_FILE).exists());

runner = TestRunners.newTestRunner(PutLambda.class);
runner.setProperty(PutLambda.ACCESS_KEY, "abcd");
runner.setProperty(PutLambda.SECRET_KEY, "secret key");
AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE);
runner.setProperty(PutLambda.AWS_LAMBDA_FUNCTION_NAME, "functionName");
runner.assertValid();
}

@AfterEach
public void tearDown() throws Exception {
public void tearDown() {
runner = null;
}

@Test
public void testSizeGreaterThan6MB() {
runner = TestRunners.newTestRunner(PutLambda.class);
runner.setProperty(PutLambda.CREDENTIALS_FILE, CREDENTIALS_FILE);
AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE);
runner.setProperty(PutLambda.AWS_LAMBDA_FUNCTION_NAME, "hello");
runner.assertValid();
byte [] largeInput = new byte[6000001];
for (int i = 0; i < 6000001; i++) {
largeInput[i] = 'a';
}
runner.enqueue(largeInput);
runner.run(1);
runner.run();

runner.assertAllFlowFilesTransferred(PutLambda.REL_FAILURE, 1);
}

@@ -73,13 +74,11 @@ public class ITPutLambda {
*/
@Test
public void testIntegrationSuccess() {
runner = TestRunners.newTestRunner(PutLambda.class);
runner.setProperty(PutLambda.CREDENTIALS_FILE, CREDENTIALS_FILE);
runner.setProperty(PutLambda.AWS_LAMBDA_FUNCTION_NAME, "hello");
runner.assertValid();

runner.enqueue("{\"test\":\"hi\"}".getBytes());
runner.run(1);
runner.run();

runner.assertAllFlowFilesTransferred(PutLambda.REL_SUCCESS, 1);

@@ -95,13 +94,11 @@ public class ITPutLambda {
*/
@Test
public void testIntegrationClientErrorBadMessageBody() {
runner = TestRunners.newTestRunner(PutLambda.class);
runner.setProperty(PutLambda.CREDENTIALS_FILE, CREDENTIALS_FILE);
runner.setProperty(PutLambda.AWS_LAMBDA_FUNCTION_NAME, "hello");
runner.assertValid();

runner.enqueue("badbod".getBytes());
runner.run(1);
runner.run();

runner.assertAllFlowFilesTransferred(PutLambda.REL_FAILURE, 1);
final List<MockFlowFile> ffs = runner.getFlowFilesForRelationship(PutLambda.REL_FAILURE);

@@ -122,8 +119,6 @@ public class ITPutLambda {
*/
@Test
public void testIntegrationFailedBadStreamName() {
runner = TestRunners.newTestRunner(PutLambda.class);
runner.setProperty(PutLambda.CREDENTIALS_FILE, CREDENTIALS_FILE);
runner.setProperty(PutLambda.AWS_LAMBDA_FUNCTION_NAME, "bad-function-name");
runner.assertValid();

@@ -135,8 +130,8 @@ public class ITPutLambda {
final MockFlowFile out = ffs.iterator().next();
assertNull(out.getAttribute(PutLambda.AWS_LAMBDA_RESULT_FUNCTION_ERROR), "Function error should be null since there is exception"
+ out.getAttribute(PutLambda.AWS_LAMBDA_RESULT_FUNCTION_ERROR));
assertNull(out.getAttribute(PutLambda.AWS_LAMBDA_RESULT_LOG), "log should not be null");
assertEquals(null,out.getAttribute(PutLambda.AWS_LAMBDA_RESULT_STATUS_CODE), "Status should be equal");
assertNull(out.getAttribute(PutLambda.AWS_LAMBDA_RESULT_LOG));
assertEquals(null, out.getAttribute(PutLambda.AWS_LAMBDA_RESULT_STATUS_CODE));

}
}
@@ -19,6 +19,7 @@ package org.apache.nifi.processors.aws.lambda;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.util.Base64;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -60,17 +61,13 @@ public class TestPutLambda {
}
};
runner = TestRunners.newTestRunner(mockPutLambda);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
}

@Test
public void testSizeGreaterThan6MB() {
runner = TestRunners.newTestRunner(PutLambda.class);
runner.setProperty(PutLambda.AWS_LAMBDA_FUNCTION_NAME, "hello");
runner.assertValid();
runner.setProperty(PutLambda.AWS_LAMBDA_FUNCTION_NAME, "test-function");
byte [] largeInput = new byte[6000001];
for (int i = 0; i < 6000001; i++) {
largeInput[i] = 'a';
}
runner.enqueue(largeInput);
runner.run(1);
@@ -36,9 +36,7 @@ import com.amazonaws.services.s3.model.ObjectTagging;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.model.Tag;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -119,18 +117,8 @@ public abstract class AbstractS3IT {
return localstack.getRegion();
}

protected static void setSecureProperties(final TestRunner runner, final PropertyDescriptor serviceDescriptor) throws InitializationException {
if (runner.getProcessContext().getProperty(serviceDescriptor).isSet()) {
return;
}

final AWSCredentialsProviderControllerService creds = new AWSCredentialsProviderControllerService();
runner.addControllerService("creds", creds);
runner.setProperty(CredentialPropertyDescriptors.ACCESS_KEY_ID, localstack.getAccessKey());
runner.setProperty(CredentialPropertyDescriptors.SECRET_KEY, localstack.getSecretKey());
runner.enableControllerService(creds);

runner.setProperty(serviceDescriptor, "creds");
protected static void setSecureProperties(final TestRunner runner) throws InitializationException {
AuthUtils.enableAccessKey(runner, localstack.getAccessKey(), localstack.getSecretKey());
}

@BeforeEach

@@ -253,7 +241,7 @@ public abstract class AbstractS3IT {
TestRunner runner = TestRunners.newTestRunner(processorClass);

try {
setSecureProperties(runner, AbstractS3Processor.AWS_CREDENTIALS_PROVIDER_SERVICE);
setSecureProperties(runner);
} catch (InitializationException e) {
Assertions.fail("Could not set security properties");
}
@@ -90,7 +90,7 @@ public class ITFetchS3Object extends AbstractS3IT {
public void testTryToFetchNotExistingFile() throws InitializationException {
final TestRunner runner = TestRunners.newTestRunner(new FetchS3Object());

setSecureProperties(runner, PutS3Object.AWS_CREDENTIALS_PROVIDER_SERVICE);
setSecureProperties(runner);
runner.setProperty(FetchS3Object.S3_REGION, getRegion());
runner.setProperty(FetchS3Object.BUCKET_WITHOUT_DEFAULT_VALUE, BUCKET_NAME);
@@ -381,7 +381,7 @@ public class ITPutS3Object extends AbstractS3IT {

final TestRunner runner = initTestRunner();

setSecureProperties(runner, PutS3Object.AWS_CREDENTIALS_PROVIDER_SERVICE);
setSecureProperties(runner);
runner.setProperty(PutS3Object.S3_REGION, getRegion());
runner.setProperty(PutS3Object.BUCKET_WITHOUT_DEFAULT_VALUE, BUCKET_NAME);
runner.setProperty(PutS3Object.KEY, "${filename}");

@@ -698,7 +698,7 @@ public class ITPutS3Object extends AbstractS3IT {

final TestRunner runner = initTestRunner();

setSecureProperties(runner, PutS3Object.AWS_CREDENTIALS_PROVIDER_SERVICE);
setSecureProperties(runner);
runner.setProperty(PutS3Object.S3_REGION, getRegion());
runner.setProperty(PutS3Object.BUCKET_WITHOUT_DEFAULT_VALUE, BUCKET_NAME);
runner.setProperty(PutS3Object.MULTIPART_THRESHOLD, TEST_PARTSIZE_STRING);
@@ -21,6 +21,7 @@ import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.DeleteObjectRequest;
import com.amazonaws.services.s3.model.DeleteVersionRequest;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;

@@ -50,6 +51,7 @@ public class TestDeleteS3Object {
}
};
runner = TestRunners.newTestRunner(mockDeleteS3Object);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
}

@Test
@@ -28,10 +28,10 @@ import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.util.StringInputStream;
import org.apache.nifi.components.ConfigVerificationResult;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.exception.FlowFileAccessException;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -62,7 +62,6 @@ public class TestFetchS3Object {

private TestRunner runner = null;
private FetchS3Object mockFetchS3Object = null;
private AmazonS3Client actualS3Client = null;
private AmazonS3Client mockS3Client = null;

@BeforeEach

@@ -76,6 +75,7 @@ public class TestFetchS3Object {
}
};
runner = TestRunners.newTestRunner(mockFetchS3Object);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
}

@Test

@@ -358,32 +358,4 @@ public class TestFetchS3Object {
runner.assertAllFlowFilesTransferred(FetchS3Object.REL_FAILURE, 1);
}

@Test
public void testGetPropertyDescriptors() {
FetchS3Object processor = new FetchS3Object();
List<PropertyDescriptor> pd = processor.getSupportedPropertyDescriptors();
assertEquals(23, pd.size(), "size should be eq");
assertTrue(pd.contains(FetchS3Object.ACCESS_KEY));
assertTrue(pd.contains(FetchS3Object.AWS_CREDENTIALS_PROVIDER_SERVICE));
assertTrue(pd.contains(FetchS3Object.BUCKET_WITHOUT_DEFAULT_VALUE));
assertTrue(pd.contains(FetchS3Object.CREDENTIALS_FILE));
assertTrue(pd.contains(FetchS3Object.ENDPOINT_OVERRIDE));
assertTrue(pd.contains(FetchS3Object.KEY));
assertTrue(pd.contains(FetchS3Object.S3_REGION));
assertTrue(pd.contains(FetchS3Object.SECRET_KEY));
assertTrue(pd.contains(FetchS3Object.SIGNER_OVERRIDE));
assertTrue(pd.contains(FetchS3Object.S3_CUSTOM_SIGNER_CLASS_NAME));
assertTrue(pd.contains(FetchS3Object.S3_CUSTOM_SIGNER_MODULE_LOCATION));
assertTrue(pd.contains(FetchS3Object.SSL_CONTEXT_SERVICE));
assertTrue(pd.contains(FetchS3Object.TIMEOUT));
assertTrue(pd.contains(FetchS3Object.VERSION_ID));
assertTrue(pd.contains(FetchS3Object.ENCRYPTION_SERVICE));
assertTrue(pd.contains(FetchS3Object.PROXY_CONFIGURATION_SERVICE));
assertTrue(pd.contains(FetchS3Object.PROXY_HOST));
assertTrue(pd.contains(FetchS3Object.PROXY_HOST_PORT));
assertTrue(pd.contains(FetchS3Object.PROXY_USERNAME));
assertTrue(pd.contains(FetchS3Object.PROXY_PASSWORD));
assertTrue(pd.contains(FetchS3Object.REQUESTER_PAYS));

}
}
@@ -36,6 +36,7 @@ import org.apache.nifi.components.ConfigVerificationResult;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.VerifiableProcessor;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.serialization.record.MockRecordWriter;
import org.apache.nifi.state.MockStateManager;

@@ -78,6 +79,7 @@ public class TestListS3 {
}
};
runner = TestRunners.newTestRunner(mockListS3);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
}
@@ -37,10 +37,10 @@ import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.services.s3.model.Tag;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.signer.AwsSignerType;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -61,7 +61,6 @@ import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;

@@ -82,6 +81,7 @@ public class TestPutS3Object {
}
};
runner = TestRunners.newTestRunner(putS3Object);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");

// MockPropertyValue does not evaluate system properties, set it in a variable with the same name
runner.setEnvironmentVariableValue("java.io.tmpdir", System.getProperty("java.io.tmpdir"));

@@ -279,54 +279,6 @@ public class TestPutS3Object {
assertEquals(expectedPath, file.getAbsolutePath());
}

@Test
public void testGetPropertyDescriptors() {
PutS3Object processor = new PutS3Object();
List<PropertyDescriptor> pd = processor.getSupportedPropertyDescriptors();
assertEquals(41, pd.size(), "size should be eq");
assertTrue(pd.contains(PutS3Object.ACCESS_KEY));
assertTrue(pd.contains(PutS3Object.AWS_CREDENTIALS_PROVIDER_SERVICE));
assertTrue(pd.contains(PutS3Object.BUCKET_WITHOUT_DEFAULT_VALUE));
assertTrue(pd.contains(PutS3Object.CANNED_ACL));
assertTrue(pd.contains(PutS3Object.CREDENTIALS_FILE));
assertTrue(pd.contains(PutS3Object.ENDPOINT_OVERRIDE));
assertTrue(pd.contains(PutS3Object.FULL_CONTROL_USER_LIST));
assertTrue(pd.contains(PutS3Object.KEY));
assertTrue(pd.contains(PutS3Object.OWNER));
assertTrue(pd.contains(PutS3Object.READ_ACL_LIST));
assertTrue(pd.contains(PutS3Object.READ_USER_LIST));
assertTrue(pd.contains(PutS3Object.S3_REGION));
assertTrue(pd.contains(PutS3Object.SECRET_KEY));
assertTrue(pd.contains(PutS3Object.SIGNER_OVERRIDE));
assertTrue(pd.contains(PutS3Object.S3_CUSTOM_SIGNER_CLASS_NAME));
assertTrue(pd.contains(PutS3Object.S3_CUSTOM_SIGNER_MODULE_LOCATION));
assertTrue(pd.contains(PutS3Object.SSL_CONTEXT_SERVICE));
assertTrue(pd.contains(PutS3Object.TIMEOUT));
assertTrue(pd.contains(PutS3Object.EXPIRATION_RULE_ID));
assertTrue(pd.contains(PutS3Object.STORAGE_CLASS));
assertTrue(pd.contains(PutS3Object.WRITE_ACL_LIST));
assertTrue(pd.contains(PutS3Object.WRITE_USER_LIST));
assertTrue(pd.contains(PutS3Object.SERVER_SIDE_ENCRYPTION));
assertTrue(pd.contains(PutS3Object.ENCRYPTION_SERVICE));
assertTrue(pd.contains(PutS3Object.USE_CHUNKED_ENCODING));
assertTrue(pd.contains(PutS3Object.USE_PATH_STYLE_ACCESS));
assertTrue(pd.contains(PutS3Object.PROXY_CONFIGURATION_SERVICE));
assertTrue(pd.contains(PutS3Object.PROXY_HOST));
assertTrue(pd.contains(PutS3Object.PROXY_HOST_PORT));
assertTrue(pd.contains(PutS3Object.PROXY_USERNAME));
assertTrue(pd.contains(PutS3Object.PROXY_PASSWORD));
assertTrue(pd.contains(PutS3Object.OBJECT_TAGS_PREFIX));
assertTrue(pd.contains(PutS3Object.REMOVE_TAG_PREFIX));
assertTrue(pd.contains(PutS3Object.CONTENT_TYPE));
assertTrue(pd.contains(PutS3Object.CONTENT_DISPOSITION));
assertTrue(pd.contains(PutS3Object.CACHE_CONTROL));
assertTrue(pd.contains(PutS3Object.MULTIPART_THRESHOLD));
assertTrue(pd.contains(PutS3Object.MULTIPART_PART_SIZE));
assertTrue(pd.contains(PutS3Object.MULTIPART_S3_AGEOFF_INTERVAL));
assertTrue(pd.contains(PutS3Object.MULTIPART_S3_MAX_AGE));
assertTrue(pd.contains(PutS3Object.MULTIPART_TEMP_DIR));
}

@Test
public void testCustomSigner() {
final AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
@@ -25,9 +25,8 @@ import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.GetObjectTaggingResult;
import com.amazonaws.services.s3.model.SetObjectTaggingRequest;
import com.amazonaws.services.s3.model.Tag;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.proxy.ProxyConfigurationService;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -64,6 +63,7 @@ public class TestTagS3Object {
}
};
runner = TestRunners.newTestRunner(mockTagS3Object);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
}

@Test

@@ -259,36 +259,6 @@ public class TestTagS3Object {
runner.run(1);

runner.assertAllFlowFilesTransferred(DeleteS3Object.REL_FAILURE, 1);
ArgumentCaptor<SetObjectTaggingRequest> captureRequest = ArgumentCaptor.forClass(SetObjectTaggingRequest.class);
}

@Test
public void testGetPropertyDescriptors() {
TagS3Object processor = new TagS3Object();
List<PropertyDescriptor> pd = processor.getSupportedPropertyDescriptors();
assertEquals(22, pd.size(), "size should be eq");
assertTrue(pd.contains(TagS3Object.ACCESS_KEY));
assertTrue(pd.contains(TagS3Object.AWS_CREDENTIALS_PROVIDER_SERVICE));
assertTrue(pd.contains(TagS3Object.BUCKET_WITHOUT_DEFAULT_VALUE));
assertTrue(pd.contains(TagS3Object.CREDENTIALS_FILE));
assertTrue(pd.contains(TagS3Object.ENDPOINT_OVERRIDE));
assertTrue(pd.contains(TagS3Object.KEY));
assertTrue(pd.contains(TagS3Object.S3_REGION));
assertTrue(pd.contains(TagS3Object.SECRET_KEY));
assertTrue(pd.contains(TagS3Object.SIGNER_OVERRIDE));
assertTrue(pd.contains(TagS3Object.S3_CUSTOM_SIGNER_CLASS_NAME));
assertTrue(pd.contains(TagS3Object.S3_CUSTOM_SIGNER_MODULE_LOCATION));
assertTrue(pd.contains(TagS3Object.SSL_CONTEXT_SERVICE));
assertTrue(pd.contains(TagS3Object.TIMEOUT));
assertTrue(pd.contains(ProxyConfigurationService.PROXY_CONFIGURATION_SERVICE));
assertTrue(pd.contains(TagS3Object.PROXY_HOST));
assertTrue(pd.contains(TagS3Object.PROXY_HOST_PORT));
assertTrue(pd.contains(TagS3Object.PROXY_USERNAME));
assertTrue(pd.contains(TagS3Object.PROXY_PASSWORD));
assertTrue(pd.contains(TagS3Object.TAG_KEY));
assertTrue(pd.contains(TagS3Object.TAG_VALUE));
assertTrue(pd.contains(TagS3Object.APPEND_TAG));
assertTrue(pd.contains(TagS3Object.VERSION_ID));
}

@Test
@@ -16,12 +16,14 @@
*/
package org.apache.nifi.processors.aws.sns;

import org.apache.nifi.processors.aws.AbstractAWSCredentialsProviderProcessor;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.HashMap;

@@ -34,13 +36,18 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
*/
public class ITPutSNS {

private final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
private static final String CREDENTIALS_FILE = System.getProperty("user.home") + "/aws-credentials.properties";
private final String TOPIC_ARN = "Add SNS ARN here";

@BeforeAll
public static void assumeCredentialsFileExists() {
Assumptions.assumeTrue(new File(CREDENTIALS_FILE).exists());
}

@Test
public void testPublish() throws IOException {
final TestRunner runner = TestRunners.newTestRunner(new PutSNS());
runner.setProperty(PutSNS.CREDENTIALS_FILE, CREDENTIALS_FILE);
AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE);
runner.setProperty(PutSNS.ARN, TOPIC_ARN);
assertTrue(runner.setProperty("DynamicProperty", "hello!").isValid());

@@ -57,17 +64,8 @@ public class ITPutSNS {
final TestRunner runner = TestRunners.newTestRunner(new PutSNS());
runner.setProperty(PutSNS.ARN, TOPIC_ARN);
assertTrue(runner.setProperty("DynamicProperty", "hello!").isValid());
final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();

runner.addControllerService("awsCredentialsProvider", serviceImpl);

runner.setProperty(serviceImpl, AbstractAWSCredentialsProviderProcessor.CREDENTIALS_FILE, System.getProperty("user.home") + "/aws-credentials.properties");
runner.enableControllerService(serviceImpl);

runner.assertValid(serviceImpl);

runner.setProperty(PutSNS.AWS_CREDENTIALS_PROVIDER_SERVICE, "awsCredentialsProvider");

AuthUtils.enableCredentialsFile(runner, CREDENTIALS_FILE);
runner.run(1);

final Map<String, String> attrs = new HashMap<>();
@@ -18,6 +18,8 @@ package org.apache.nifi.processors.aws.sns;

import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -57,8 +59,8 @@ public class TestPutSNS {
}

@Test
public void testPublish() {
runner.setProperty(PutSNS.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
public void testPublish() throws InitializationException {
AuthUtils.enableCredentialsFile(runner, "src/test/resources/mock-aws-credentials.properties");
runner.setProperty(PutSNS.ARN, "arn:aws:sns:us-west-2:123456789012:test-topic-1");
runner.setProperty(PutSNS.SUBJECT, "${eval.subject}");
assertTrue(runner.setProperty("DynamicProperty", "hello!").isValid());

@@ -87,8 +89,8 @@ public class TestPutSNS {
}

@Test
public void testPublishFIFO() {
runner.setProperty(PutSNS.CREDENTIALS_FILE, "src/test/resources/mock-aws-credentials.properties");
public void testPublishFIFO() throws InitializationException {
AuthUtils.enableCredentialsFile(runner, "src/test/resources/mock-aws-credentials.properties");
runner.setProperty(PutSNS.ARN, "arn:aws:sns:us-west-2:123456789012:test-topic-1.fifo");
runner.setProperty(PutSNS.SUBJECT, "${eval.subject}");
runner.setProperty(PutSNS.MESSAGEDEDUPLICATIONID, "${myuuid}");

@@ -129,6 +131,7 @@ public class TestPutSNS {
runner.enqueue("Test Message Content", ffAttributes);
Mockito.when(mockSNSClient.publish(Mockito.any(PublishRequest.class))).thenThrow(SnsException.builder().build());

AuthUtils.enableAccessKey(runner, "accessKey", "secretKey");
runner.run();

ArgumentCaptor<PublishRequest> captureRequest = ArgumentCaptor.forClass(PublishRequest.class);
@@ -18,14 +18,11 @@
package org.apache.nifi.processors.aws.sqs;

import org.apache.nifi.processor.Processor;
import org.apache.nifi.processors.aws.credentials.provider.factory.CredentialPropertyDescriptors;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.s3.AbstractS3Processor;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.testcontainers.containers.localstack.LocalStackContainer;
import org.testcontainers.utility.DockerImageName;

@@ -86,18 +83,7 @@ public abstract class AbstractSQSIT {

protected TestRunner initRunner(final Class<? extends Processor> processorClass) {
TestRunner runner = TestRunners.newTestRunner(processorClass);

try {
final AWSCredentialsProviderControllerService creds = new AWSCredentialsProviderControllerService();
runner.addControllerService("creds", creds);
runner.setProperty(CredentialPropertyDescriptors.ACCESS_KEY_ID, localstack.getAccessKey());
runner.setProperty(CredentialPropertyDescriptors.SECRET_KEY, localstack.getSecretKey());
runner.enableControllerService(creds);

runner.setProperty(PutSQS.AWS_CREDENTIALS_PROVIDER_SERVICE, "creds");
} catch (InitializationException e) {
Assertions.fail("Could not set security properties");
}
AuthUtils.enableAccessKey(runner, localstack.getAccessKey(), localstack.getSecretKey());

runner.setProperty(AbstractS3Processor.S3_REGION, localstack.getRegion());
runner.setProperty(AbstractS3Processor.ENDPOINT_OVERRIDE, localstack.getEndpointOverride(LocalStackContainer.Service.SQS).toString());
@@ -17,6 +17,7 @@
package org.apache.nifi.processors.aws.sqs;

import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;

@@ -55,6 +56,7 @@ public class TestDeleteSQS {
}
};
runner = TestRunners.newTestRunner(mockDeleteSQS);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
}

@Test
@@ -17,6 +17,7 @@
package org.apache.nifi.processors.aws.sqs;

import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -55,6 +56,7 @@ public class TestGetSQS {
}
};
runner = TestRunners.newTestRunner(mockGetSQS);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
}

@Test
@@ -17,6 +17,7 @@
package org.apache.nifi.processors.aws.sqs;

import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;

@@ -50,6 +51,7 @@ public class TestPutSQS {
}
};
runner = TestRunners.newTestRunner(mockPutSQS);
AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
}

@Test
@@ -0,0 +1,54 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.nifi.processors.aws.testutil;

import org.apache.nifi.processors.aws.AbstractAWSCredentialsProviderProcessor;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunner;

public class AuthUtils {
public static void enableCredentialsFile(final TestRunner runner, final String credentialsFile) {
final AWSCredentialsProviderControllerService credentialsService = new AWSCredentialsProviderControllerService();
try {
runner.addControllerService("creds", credentialsService);
} catch (final InitializationException e) {
throw new AssertionError("Failed to enable AWSCredentialsProviderControllerService", e);
}

runner.setProperty(credentialsService, AWSCredentialsProviderControllerService.CREDENTIALS_FILE, credentialsFile);
runner.enableControllerService(credentialsService);

runner.setProperty(AbstractAWSCredentialsProviderProcessor.AWS_CREDENTIALS_PROVIDER_SERVICE, "creds");
}

public static void enableAccessKey(final TestRunner runner, final String accessKeyId, final String secretKey) {
final AWSCredentialsProviderControllerService credentialsService = new AWSCredentialsProviderControllerService();
try {
runner.addControllerService("creds", credentialsService);
} catch (final InitializationException e) {
throw new AssertionError("Failed to enable AWSCredentialsProviderControllerService", e);
}

runner.setProperty(credentialsService, AWSCredentialsProviderControllerService.ACCESS_KEY_ID, accessKeyId);
runner.setProperty(credentialsService, AWSCredentialsProviderControllerService.SECRET_KEY, secretKey);
runner.enableControllerService(credentialsService);

runner.setProperty(AbstractAWSCredentialsProviderProcessor.AWS_CREDENTIALS_PROVIDER_SERVICE, "creds");
}
}
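
For orientation, a minimal usage sketch of this new helper follows. It is illustrative only: the test class name is hypothetical, and the stream name and credential values are placeholders mirroring the setUp() methods in the updated tests above.

import org.apache.nifi.processors.aws.kinesis.stream.PutKinesisStream;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.Test;

// Hypothetical example test; mirrors how the updated unit tests in this diff configure credentials.
public class ExampleAuthUtilsUsage {

    @Test
    public void testRunnerIsValidWithCredentialsService() {
        final TestRunner runner = TestRunners.newTestRunner(PutKinesisStream.class);
        runner.setProperty(PutKinesisStream.KINESIS_STREAM_NAME, "kstream");
        // Registers an AWSCredentialsProviderControllerService under the id "creds", configures the
        // given access key id and secret key, enables it, and points the processor's
        // AWS_CREDENTIALS_PROVIDER_SERVICE property at it.
        AuthUtils.enableAccessKey(runner, "accessKeyId", "secretKey");
        runner.assertValid();
    }
}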
@@ -21,16 +21,17 @@ import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processors.aws.AbstractAWSCredentialsProviderProcessor;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.provenance.ProvenanceEventRecord;
import org.apache.nifi.provenance.ProvenanceEventType;
import org.apache.nifi.proxy.StandardProxyConfigurationService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.net.Proxy;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;

@@ -52,24 +53,16 @@ public abstract class TestInvokeAWSGatewayApiCommon {

protected MockWebServer mockWebServer;

protected void setupControllerService() throws InitializationException {
final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
runner.addControllerService("awsCredentialsProvider", serviceImpl);
runner.setProperty(serviceImpl, InvokeAWSGatewayApi.ACCESS_KEY, "awsAccessKey");
runner.setProperty(serviceImpl, InvokeAWSGatewayApi.SECRET_KEY, "awsSecretKey");
runner.enableControllerService(serviceImpl);
runner.setProperty(InvokeAWSGatewayApi.AWS_CREDENTIALS_PROVIDER_SERVICE,
"awsCredentialsProvider");
protected void setupControllerService() {
AuthUtils.enableAccessKey(runner, "awsAccessKey", "awsSecretKey");
}

protected void setupAuth() {
runner.setProperty(InvokeAWSGatewayApi.ACCESS_KEY, "testAccessKey");
runner.setProperty(InvokeAWSGatewayApi.SECRET_KEY, "testSecretKey");
AuthUtils.enableAccessKey(runner, "awsAccessKey", "awsSecretKey");
}

protected void setupCredFile() {
runner.setProperty(AbstractAWSCredentialsProviderProcessor.CREDENTIALS_FILE,
"src/test/resources/mock-aws-credentials.properties");
protected void setupCredFile() throws InitializationException {
AuthUtils.enableCredentialsFile(runner, "src/test/resources/mock-aws-credentials.properties");
}

public void setupEndpointAndRegion() {

@@ -1273,13 +1266,16 @@ public abstract class TestInvokeAWSGatewayApiCommon {

runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_GATEWAY_API_ENDPOINT, "http://nifi.apache.org/");
runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
runner.setProperty(InvokeAWSGatewayApi.PROXY_HOST, "${proxy.host}");

runner.setProperty(InvokeAWSGatewayApi.PROXY_HOST_PORT, "${proxy.port}");
runner.setProperty(InvokeAWSGatewayApi.PROXY_USERNAME, "${proxy.username}");

runner.assertNotValid();
runner.setProperty(InvokeAWSGatewayApi.PROXY_PASSWORD, "${proxy.password}");
final StandardProxyConfigurationService proxyService = new StandardProxyConfigurationService();
runner.addControllerService("proxy", proxyService);
runner.setProperty(proxyService, StandardProxyConfigurationService.PROXY_TYPE, Proxy.Type.HTTP.name());
runner.setProperty(proxyService, StandardProxyConfigurationService.PROXY_SERVER_HOST, "${proxy.host}");
runner.setProperty(proxyService, StandardProxyConfigurationService.PROXY_SERVER_PORT, "${proxy.port}");
runner.setProperty(proxyService, StandardProxyConfigurationService.PROXY_USER_NAME, "${proxy.username}");
runner.setProperty(proxyService, StandardProxyConfigurationService.PROXY_USER_PASSWORD, "${proxy.password}");
runner.enableControllerService(proxyService);
runner.setProperty(InvokeAWSGatewayApi.PROXY_CONFIGURATION_SERVICE, "proxy");

createFlowFiles(runner);
@@ -40,6 +40,7 @@ public class TestInvokeAmazonGatewayApi extends TestInvokeAWSGatewayApiCommon {
@Test
public void testStaticCredentials() throws Exception {
runner.clearProperties();

setupAuth();
test200();
}
@@ -29,7 +29,7 @@ import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicStatusLine;
import org.apache.http.protocol.HttpContext;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.processors.aws.testutil.AuthUtils;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

@@ -62,19 +62,12 @@ public class TestInvokeAmazonGatewayApiMock {
runner = TestRunners.newTestRunner(mockGetApi);
runner.setValidateExpressionUsage(false);

final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
runner.addControllerService("awsCredentialsProvider", serviceImpl);
runner.setProperty(serviceImpl, InvokeAWSGatewayApi.ACCESS_KEY, "awsAccessKey");
runner.setProperty(serviceImpl, InvokeAWSGatewayApi.SECRET_KEY, "awsSecretKey");
runner.enableControllerService(serviceImpl);
AuthUtils.enableAccessKey(runner, "awsAccessKey", "awsSecretKey");

runner.setProperty(InvokeAWSGatewayApi.AWS_CREDENTIALS_PROVIDER_SERVICE,
"awsCredentialsProvider");
runner.setProperty(InvokeAWSGatewayApi.REGION, "us-east-1");
runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_API_KEY, "abcd");
runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/TEST");
runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_GATEWAY_API_ENDPOINT,
"https://foobar.execute-api.us-east-1.amazonaws.com");
runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_GATEWAY_API_ENDPOINT, "https://foobar.execute-api.us-east-1.amazonaws.com");
}

@Test
@ -60,6 +60,8 @@ import org.apache.nifi.logging.ComponentLog;
|
|||
import org.apache.nifi.logging.LogLevel;
|
||||
import org.apache.nifi.logging.LogRepositoryFactory;
|
||||
import org.apache.nifi.logging.StandardLoggingContext;
|
||||
import org.apache.nifi.migration.ControllerServiceCreationDetails;
|
||||
import org.apache.nifi.migration.ControllerServiceFactory;
|
||||
import org.apache.nifi.migration.StandardPropertyConfiguration;
|
||||
import org.apache.nifi.migration.StandardRelationshipConfiguration;
|
||||
import org.apache.nifi.nar.ExtensionManager;
|
||||
|
@ -228,27 +230,12 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
        try {
            if (processorDetails.getProcClass().isAnnotationPresent(DefaultSchedule.class)) {
                DefaultSchedule dsc = processorDetails.getProcClass().getAnnotation(DefaultSchedule.class);
                try {
                    this.setSchedulingStrategy(dsc.strategy());
                } catch (Throwable ex) {
                    LOG.error(String.format("Error while setting scheduling strategy from DefaultSchedule annotation: %s", ex.getMessage()), ex);
                }
                try {
                    this.setSchedulingPeriod(dsc.period());
                } catch (Throwable ex) {
                    this.setSchedulingStrategy(SchedulingStrategy.TIMER_DRIVEN);
                    LOG.error(String.format("Error while setting scheduling period from DefaultSchedule annotation: %s", ex.getMessage()), ex);
                }
                if (!processorDetails.isTriggeredSerially()) {
                    try {
                        setMaxConcurrentTasks(dsc.concurrentTasks());
                    } catch (Throwable ex) {
                        LOG.error(String.format("Error while setting max concurrent tasks from DefaultSchedule annotation: %s", ex.getMessage()), ex);
                    }
                }
                setSchedulingStrategy(dsc.strategy());
                setSchedulingPeriod(dsc.period());
                setMaxConcurrentTasks(dsc.concurrentTasks());
            }
        } catch (Throwable ex) {
            LOG.error(String.format("Error while setting default schedule from DefaultSchedule annotation: %s",ex.getMessage()),ex);
        } catch (final Exception e) {
            LOG.error("Error while setting default schedule from DefaultSchedule annotation", e);
        }
    }
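For context, the values applied above come from the @DefaultSchedule annotation on the processor class. A minimal, hypothetical processor declaring it (imports shown for completeness; the class name is made up):

    import org.apache.nifi.annotation.configuration.DefaultSchedule;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.scheduling.SchedulingStrategy;

    @DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min", concurrentTasks = 1)
    public class ExampleScheduledProcessor extends AbstractProcessor {
        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) {
            // no-op; only the annotation matters for this illustration
        }
    }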
@ -272,9 +259,6 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
        return processorRef.get().getBundleCoordinate();
    }

    /**
     * @return comments about this specific processor instance
     */
    @Override
    public String getComments() {
        return comments.get();

@ -305,14 +289,6 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
        return getProcessor().getClass().isAnnotationPresent(DeprecationNotice.class);
    }


    /**
     * Provides an opportunity to retain information about this particular
     * processor instance
     *
     * @param comments
     *            new comments
     */
    @Override
    public synchronized void setComments(final String comments) {
        this.comments.set(CharacterFilterUtils.filterInvalidXmlCharacters(comments));
@ -334,9 +310,9 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
    }

    @Override
    public synchronized void setStyle(final Map<String, String> style) {
    public void setStyle(final Map<String, String> style) {
        if (style != null) {
            this.style.set(Collections.unmodifiableMap(new HashMap<>(style)));
            this.style.set(Map.copyOf(style));
        }
    }
@ -2102,9 +2078,9 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
    }

    @Override
    public void migrateConfiguration(final ProcessContext context) {
    public void migrateConfiguration(final ControllerServiceFactory serviceFactory) {
        try {
            migrateProperties(context);
            migrateProperties(serviceFactory);
        } catch (final Exception e) {
            LOG.error("Failed to migrate Property Configuration for {}.", this, e);
        }
@ -2116,19 +2092,27 @@ public class StandardProcessorNode extends ProcessorNode implements Connectable
        }
    }

    private void migrateProperties(final ProcessContext context) {
    private void migrateProperties(final ControllerServiceFactory serviceFactory) {
        final Processor processor = getProcessor();

        final StandardPropertyConfiguration propertyConfig = new StandardPropertyConfiguration(context.getAllProperties(), toString());
        final StandardPropertyConfiguration propertyConfig = new StandardPropertyConfiguration(toPropertyNameMap(getEffectivePropertyValues()),
            toPropertyNameMap(getRawPropertyValues()), this::mapRawValueToEffectiveValue, toString(), serviceFactory);
        try (final NarCloseable nc = NarCloseable.withComponentNarLoader(getExtensionManager(), processor.getClass(), getIdentifier())) {
            processor.migrateProperties(propertyConfig);
        }

        if (propertyConfig.isModified()) {
            // Create any necessary Controller Services. It is important that we create the services
            // before updating the processor's properties, as it's necessary in order to properly account
            // for the Controller Service References.
            final List<ControllerServiceCreationDetails> servicesCreated = propertyConfig.getCreatedServices();
            servicesCreated.forEach(serviceFactory::create);

            overwriteProperties(propertyConfig.getProperties());
        }
    }


    private void migrateRelationships() {
        final Processor processor = getProcessor();

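The framework code above only drives the migration; the actual property moves are written by each component. A hedged sketch of a processor-side override, loosely modeled on the AWS credential migration in this commit (the property names and the target property are illustrative, not the exact constants used by the AWS processors):

    @Override
    public void migrateProperties(final PropertyConfiguration config) {
        if (config.isPropertySet("Access Key") || config.isPropertySet("Secret Key")) {
            final Map<String, String> serviceProperties = new HashMap<>();
            config.getRawPropertyValue("Access Key").ifPresent(value -> serviceProperties.put("Access Key", value));
            config.getRawPropertyValue("Secret Key").ifPresent(value -> serviceProperties.put("Secret Key", value));

            // Returns the identifier of a matching existing service, or of one the framework will create.
            final String serviceId = config.createControllerService(
                "org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService", serviceProperties);

            config.setProperty("AWS Credentials Provider service", serviceId); // assumed property name
            config.removeProperty("Access Key");
            config.removeProperty("Secret Key");
        }
    }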
@ -44,6 +44,8 @@ import org.apache.nifi.controller.service.ControllerServiceProvider;
import org.apache.nifi.controller.service.StandardConfigurationContext;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.migration.ControllerServiceCreationDetails;
import org.apache.nifi.migration.ControllerServiceFactory;
import org.apache.nifi.migration.StandardPropertyConfiguration;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.nar.InstanceClassLoader;

@ -426,10 +428,12 @@ public abstract class AbstractReportingTaskNode extends AbstractComponentNode im
    }

    @Override
    public void migrateConfiguration(final ConfigurationContext context) {
    public void migrateConfiguration(final ControllerServiceFactory serviceFactory) {
        final ReportingTask task = getReportingTask();

        final StandardPropertyConfiguration propertyConfig = new StandardPropertyConfiguration(context.getAllProperties(), toString());
        final StandardPropertyConfiguration propertyConfig = new StandardPropertyConfiguration(toPropertyNameMap(getEffectivePropertyValues()),
            toPropertyNameMap(getRawPropertyValues()), this::mapRawValueToEffectiveValue, toString(), serviceFactory);

        try (final NarCloseable nc = NarCloseable.withComponentNarLoader(getExtensionManager(), task.getClass(), getIdentifier())) {
            task.migrateProperties(propertyConfig);
        } catch (final Exception e) {

@ -437,6 +441,12 @@ public abstract class AbstractReportingTaskNode extends AbstractComponentNode im
        }

        if (propertyConfig.isModified()) {
            // Create any necessary Controller Services. It is important that we create the services
            // before updating the reporting task's properties, as it's necessary in order to properly account
            // for the Controller Service References.
            final List<ControllerServiceCreationDetails> servicesCreated = propertyConfig.getCreatedServices();
            servicesCreated.forEach(serviceFactory::create);

            overwriteProperties(propertyConfig.getProperties());
        }
    }
@ -53,6 +53,8 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.logging.LogLevel;
import org.apache.nifi.logging.LogRepositoryFactory;
import org.apache.nifi.logging.StandardLoggingContext;
import org.apache.nifi.migration.ControllerServiceCreationDetails;
import org.apache.nifi.migration.ControllerServiceFactory;
import org.apache.nifi.migration.StandardPropertyConfiguration;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.nar.InstanceClassLoader;

@ -834,22 +836,28 @@ public class StandardControllerServiceNode extends AbstractComponentNode impleme
    }

    @Override
    public void migrateConfiguration(final ConfigurationContext context) {
        final ControllerService service = getControllerServiceImplementation();
    public void migrateConfiguration(final ControllerServiceFactory serviceFactory) {
        final StandardPropertyConfiguration propertyConfig = new StandardPropertyConfiguration(toPropertyNameMap(getEffectivePropertyValues()),
            toPropertyNameMap(getRawPropertyValues()), super::mapRawValueToEffectiveValue, toString(), serviceFactory);

        final StandardPropertyConfiguration propertyConfig = new StandardPropertyConfiguration(context.getAllProperties(), toString());
        try (final NarCloseable nc = NarCloseable.withComponentNarLoader(getExtensionManager(), service.getClass(), getIdentifier())) {
            service.migrateProperties(propertyConfig);
        final ControllerService implementation = getControllerServiceImplementation();
        try (final NarCloseable nc = NarCloseable.withComponentNarLoader(getExtensionManager(), implementation.getClass(), getIdentifier())) {
            implementation.migrateProperties(propertyConfig);
        } catch (final Exception e) {
            LOG.error("Failed to migrate Property Configuration for {}.", this, e);
        }

        if (propertyConfig.isModified()) {
            // Create any necessary Controller Services. It is important that we create the services
            // before updating this service's properties, as it's necessary in order to properly account
            // for the Controller Service References.
            final List<ControllerServiceCreationDetails> servicesCreated = propertyConfig.getCreatedServices();
            servicesCreated.forEach(serviceFactory::create);

            overwriteProperties(propertyConfig.getProperties());
        }
    }


    @Override
    protected void performFlowAnalysisOnThis() {
        getValidationContextFactory().getFlowAnalyzer().ifPresent(flowAnalyzer -> flowAnalyzer.analyzeControllerService(this));
@ -28,7 +28,6 @@ import org.apache.nifi.connectable.Position;
import org.apache.nifi.connectable.Size;
import org.apache.nifi.controller.BackoffMechanism;
import org.apache.nifi.controller.ComponentNode;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.FlowAnalysisRuleNode;
import org.apache.nifi.controller.ParameterProviderNode;
import org.apache.nifi.controller.ProcessorNode;

@ -82,6 +81,8 @@ import org.apache.nifi.groups.RemoteProcessGroup;
import org.apache.nifi.groups.RemoteProcessGroupPortDescriptor;
import org.apache.nifi.groups.StandardVersionedFlowStatus;
import org.apache.nifi.logging.LogLevel;
import org.apache.nifi.migration.ControllerServiceFactory;
import org.apache.nifi.migration.StandardControllerServiceFactory;
import org.apache.nifi.parameter.Parameter;
import org.apache.nifi.parameter.ParameterContext;
import org.apache.nifi.parameter.ParameterContextManager;

@ -251,15 +252,15 @@ public class StandardVersionedComponentSynchronizer implements VersionedComponen
        });

        for (final ComponentNode extension : createdExtensions) {
            final ControllerServiceFactory serviceFactory = new StandardControllerServiceFactory(context.getExtensionManager(), context.getFlowManager(),
                context.getControllerServiceProvider(), extension);

            if (extension instanceof final ProcessorNode processor) {
                final ProcessContext migrationContext = context.getProcessContextFactory().apply(processor);
                processor.migrateConfiguration(migrationContext);
                processor.migrateConfiguration(serviceFactory);
            } else if (extension instanceof final ControllerServiceNode service) {
                final ConfigurationContext migrationContext = context.getConfigurationContextFactory().apply(service);
                service.migrateConfiguration(migrationContext);
                service.migrateConfiguration(serviceFactory);
            } else if (extension instanceof final ReportingTaskNode task) {
                final ConfigurationContext migrationContext = context.getConfigurationContextFactory().apply(task);
                task.migrateConfiguration(migrationContext);
                task.migrateConfiguration(serviceFactory);
            }
        }

@ -43,23 +43,19 @@ public class DefaultComponentScheduler extends AbstractComponentScheduler {
        }

        switch (component.getConnectableType()) {
            case PROCESSOR: {
            case PROCESSOR -> {
                final ProcessorNode processorNode = (ProcessorNode) component;
                processorNode.getProcessGroup().startProcessor(processorNode, false);
                break;
            }
            case INPUT_PORT: {
            case INPUT_PORT -> {
                final Port port = (Port) component;
                port.getProcessGroup().startInputPort(port);
                break;
            }
            case OUTPUT_PORT: {
            case OUTPUT_PORT -> {
                final Port port = (Port) component;
                port.getProcessGroup().startOutputPort(port);
                break;
            }
            case REMOTE_INPUT_PORT:
            case REMOTE_OUTPUT_PORT: {
            case REMOTE_INPUT_PORT, REMOTE_OUTPUT_PORT -> {
                final RemoteGroupPort port = (RemoteGroupPort) component;
                port.getRemoteProcessGroup().startTransmitting(port);
            }
@ -0,0 +1,236 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.nifi.migration;
|
||||
|
||||
import org.apache.nifi.bundle.Bundle;
|
||||
import org.apache.nifi.bundle.BundleCoordinate;
|
||||
import org.apache.nifi.components.validation.ValidationStatus;
|
||||
import org.apache.nifi.controller.ComponentNode;
|
||||
import org.apache.nifi.controller.ControllerService;
|
||||
import org.apache.nifi.controller.flow.FlowManager;
|
||||
import org.apache.nifi.controller.service.ControllerServiceNode;
|
||||
import org.apache.nifi.controller.service.ControllerServiceProvider;
|
||||
import org.apache.nifi.flow.ExecutionEngine;
|
||||
import org.apache.nifi.groups.ProcessGroup;
|
||||
import org.apache.nifi.nar.ExtensionManager;
|
||||
import org.apache.nifi.nar.NarClassLoadersHolder;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.SortedMap;
|
||||
import java.util.TreeMap;
|
||||
import java.util.UUID;
|
||||
|
||||
public class StandardControllerServiceFactory implements ControllerServiceFactory {
|
||||
private static final Logger logger = LoggerFactory.getLogger(StandardControllerServiceFactory.class);
|
||||
|
||||
private final ExtensionManager extensionManager;
|
||||
private final FlowManager flowManager;
|
||||
private final ControllerServiceProvider serviceProvider;
|
||||
private final ComponentNode creator;
|
||||
|
||||
public StandardControllerServiceFactory(final ExtensionManager extensionManager, final FlowManager flowManager, final ControllerServiceProvider serviceProvider,
|
||||
final ComponentNode creator) {
|
||||
this.extensionManager = extensionManager;
|
||||
this.flowManager = flowManager;
|
||||
this.serviceProvider = serviceProvider;
|
||||
this.creator = creator;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public ControllerServiceCreationDetails getCreationDetails(final String implementationClassName, final Map<String, String> propertyValues) {
|
||||
final String serviceId = determineServiceId(implementationClassName, propertyValues);
|
||||
final ControllerServiceNode existingNode = flowManager.getControllerServiceNode(serviceId);
|
||||
if (existingNode != null) {
|
||||
final Class<? extends ControllerService> serviceClass = existingNode.getControllerServiceImplementation().getClass();
|
||||
if (isImplementation(serviceClass, implementationClassName)) {
|
||||
logger.debug("Found existing Controller Service with ID {} for implementation {}", serviceId, implementationClassName);
|
||||
return alreadyExists(existingNode, implementationClassName);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Determined from provided implementation classname, Process Group of creator," +
    " and provided property values that the Controller Service to create should have an ID of %s. However," +
    " there already exists a Controller Service with that ID (%s) and it is not of the correct type: %s",
    serviceId, existingNode, implementationClassName));
|
||||
}
|
||||
|
||||
// There is no match. Create a new Controller Service
|
||||
final Bundle bundle = determineBundle(implementationClassName);
|
||||
return toBeCreated(serviceId, implementationClassName, bundle.getBundleDetails().getCoordinate(), propertyValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ControllerServiceNode create(final ControllerServiceCreationDetails creationDetails) {
|
||||
final ControllerServiceNode serviceNode = flowManager.createControllerService(creationDetails.type(), creationDetails.serviceIdentifier(), creationDetails.serviceBundleCoordinate(),
|
||||
Collections.emptySet(), true, true, null);
|
||||
|
||||
final Optional<ProcessGroup> group = creator.getParentProcessGroup();
|
||||
if (group.isPresent()) {
|
||||
group.get().addControllerService(serviceNode);
|
||||
logger.info("Created {} in {} as a step in the migration of {}", serviceNode, group, creator);
|
||||
} else {
|
||||
flowManager.addRootControllerService(serviceNode);
|
||||
logger.info("Created {} as a Controller-Level Controller Service as a step in the migration of {}", serviceNode, creator);
|
||||
}
|
||||
|
||||
serviceNode.setProperties(creationDetails.serviceProperties());
|
||||
|
||||
final ControllerServiceFactory serviceFactory = new StandardControllerServiceFactory(extensionManager, flowManager, serviceProvider, serviceNode);
|
||||
serviceNode.migrateConfiguration(serviceFactory);
|
||||
|
||||
if (isEnable()) {
|
||||
final ValidationStatus validationStatus = serviceNode.performValidation();
|
||||
if (validationStatus == ValidationStatus.VALID) {
|
||||
serviceProvider.enableControllerService(serviceNode);
|
||||
logger.info("Enabled newly created Controller Service {}", serviceNode);
|
||||
}
|
||||
}
|
||||
|
||||
return serviceNode;
|
||||
}
|
||||
|
||||
private boolean isEnable() {
|
||||
// Do not enable any Controller Services if it's added to a stateless group. Let the stateless group handle
|
||||
// the lifecycle of Controller Services on its own.
|
||||
final Optional<ProcessGroup> optionalGroup = creator.getParentProcessGroup();
|
||||
if (optionalGroup.isPresent()) {
|
||||
final ExecutionEngine executionEngine = optionalGroup.get().resolveExecutionEngine();
|
||||
if (executionEngine == ExecutionEngine.STATELESS) {
|
||||
logger.debug("Will not enable newly created Controller Services because parent group {} is stateless", optionalGroup.get());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private ControllerServiceCreationDetails toBeCreated(final String serviceId, final String type, final BundleCoordinate bundleCoordinate, final Map<String, String> propertyValues) {
|
||||
return new ControllerServiceCreationDetails(serviceId, type, bundleCoordinate, propertyValues, ControllerServiceCreationDetails.CreationState.SERVICE_TO_BE_CREATED);
|
||||
}
|
||||
|
||||
private ControllerServiceCreationDetails alreadyExists(final ControllerServiceNode serviceNode, final String type) {
|
||||
final Map<String, String> propertyValues = new HashMap<>();
|
||||
serviceNode.getRawPropertyValues().forEach((key, value) -> propertyValues.put(key.getName(), value));
|
||||
|
||||
return new ControllerServiceCreationDetails(serviceNode.getIdentifier(),
|
||||
type,
|
||||
serviceNode.getBundleCoordinate(),
|
||||
propertyValues,
|
||||
ControllerServiceCreationDetails.CreationState.SERVICE_ALREADY_EXISTS);
|
||||
}
|
||||
|
||||
private boolean isImplementation(final Class<?> clazz, final String className) {
|
||||
if (className.equals(clazz.getName())) {
|
||||
return true;
|
||||
}
|
||||
|
||||
final Class<?> superClass = clazz.getSuperclass();
|
||||
if (Object.class.equals(superClass)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return isImplementation(superClass, className);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a deterministic UUID for the Controller Service based on the Process Group that the creator resides in,
|
||||
* if any, the implementation class name, and the given properties
|
||||
* @param className the classname of the Controller Service
|
||||
* @param propertyValues the property values
|
||||
* @return a UUID for the Controller Service
|
||||
*/
|
||||
// Visible for testing
|
||||
protected String determineServiceId(final String className, final Map<String, String> propertyValues) {
|
||||
final SortedMap<String, String> sortedProperties = new TreeMap<>(propertyValues);
|
||||
final String componentDescription = creator.getProcessGroupIdentifier() + className + sortedProperties;
|
||||
final String serviceId = UUID.nameUUIDFromBytes(componentDescription.getBytes(StandardCharsets.UTF_8)).toString();
|
||||
logger.debug("For Controller Service of type {} created from {} will use UUID {}", className, creator, serviceId);
|
||||
return serviceId;
|
||||
}
|
||||
|
||||
private Bundle determineBundle(final String implementationClassName) {
|
||||
logger.debug("Determining which Bundle should be used to create Controller Service of type {} for {}", implementationClassName, creator);
|
||||
|
||||
// Get all available bundles for the given implementation type
|
||||
final List<Bundle> availableBundles = extensionManager.getBundles(implementationClassName);
|
||||
|
||||
// If no versions are available, throw an Exception
|
||||
if (availableBundles.isEmpty()) {
|
||||
throw new IllegalArgumentException("Cannot create Controller Service because the implementation Class [%s] is not a known Controller Service".formatted(implementationClassName));
|
||||
}
|
||||
|
||||
// If exactly 1 version is available, use it.
|
||||
if (availableBundles.size() == 1) {
|
||||
logger.debug("Found exactly 1 Bundle for Controller Service of type {}: {}", implementationClassName, availableBundles.get(0));
|
||||
return availableBundles.get(0);
|
||||
}
|
||||
|
||||
// If there's a version that's in the same bundle as the creator, use it.
|
||||
logger.debug("There are {} available Bundles for Controller Service of type {}", availableBundles.size(), implementationClassName);
|
||||
final Optional<Bundle> sameBundleMatch = availableBundles.stream()
|
||||
.filter(bundle -> bundle.getBundleDetails().getCoordinate().equals(creator.getBundleCoordinate()))
|
||||
.findFirst();
|
||||
|
||||
if (sameBundleMatch.isPresent()) {
|
||||
logger.debug("Found one Bundle that contains the Controller Service implementation {} that also contains the creator ({}). Will use it: {}",
|
||||
implementationClassName, creator, sameBundleMatch.get());
|
||||
return sameBundleMatch.get();
|
||||
}
|
||||
|
||||
// If there's a version that is the same as the creator's version, use it.
|
||||
final List<Bundle> sameVersionBundleMatch = availableBundles.stream()
|
||||
.filter(bundle -> bundle.getBundleDetails().getCoordinate().getVersion().equals(creator.getBundleCoordinate().getVersion()))
|
||||
.toList();
|
||||
|
||||
if (sameVersionBundleMatch.size() == 1) {
|
||||
logger.debug("Found one Bundle that contains the Controller Service implementation {} that also contains the same version as the creator ({}). Will use it: {}",
|
||||
implementationClassName, creator, sameVersionBundleMatch.get(0));
|
||||
return sameVersionBundleMatch.get(0);
|
||||
}
|
||||
|
||||
// If there's a version that is the same as the framework version, use it.
|
||||
final Bundle frameworkBundle = getFrameworkBundle();
|
||||
final String frameworkVersion = frameworkBundle.getBundleDetails().getCoordinate().getVersion();
|
||||
final Optional<Bundle> sameVersionAsFrameworkMatch = availableBundles.stream()
|
||||
.filter(bundle -> bundle.getBundleDetails().getCoordinate().getVersion().equals(frameworkVersion))
|
||||
.findFirst();
|
||||
|
||||
if (sameVersionAsFrameworkMatch.isPresent()) {
|
||||
logger.debug("Found one Bundle that contains the Controller Service implementation {} that also contains the same version as the NiFi Framework. Will use it: {}",
|
||||
implementationClassName, sameVersionAsFrameworkMatch.get());
|
||||
return sameVersionAsFrameworkMatch.get();
|
||||
}
|
||||
|
||||
// Unable to determine which version to use. Throw an Exception.
|
||||
logger.debug("Could not find a suitable Bundle for creating Controller Service implementation {} from creator {}", implementationClassName, creator);
|
||||
throw new IllegalArgumentException(String.format("There are %s versions of the %s Controller Service, but the appropriate version could not be resolved " +
|
||||
"from extension %s that is attempting to create the Controller Service", availableBundles.size(), implementationClassName, creator));
|
||||
}
|
||||
|
||||
// Visible for testing
|
||||
protected Bundle getFrameworkBundle() {
|
||||
return NarClassLoadersHolder.getInstance().getFrameworkBundle();
|
||||
}
|
||||
}
|
|
@ -20,35 +20,49 @@ package org.apache.nifi.migration;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
|
||||
public class StandardPropertyConfiguration implements PropertyConfiguration {
|
||||
private static final Logger logger = LoggerFactory.getLogger(StandardPropertyConfiguration.class);
|
||||
|
||||
private final Map<String, String> properties;
|
||||
private final Map<String, String> effectiveProperties;
|
||||
private final Map<String, String> rawProperties;
|
||||
private final Function<String, String> effectiveValueResolver;
|
||||
private final String componentDescription;
|
||||
private final ControllerServiceFactory controllerServiceFactory;
|
||||
private final List<ControllerServiceCreationDetails> createdServices = new ArrayList<>();
|
||||
private boolean modified = false;
|
||||
|
||||
public StandardPropertyConfiguration(final Map<String, String> configuration, final String componentDescription) {
|
||||
public StandardPropertyConfiguration(final Map<String, String> effectivePropertyValues, final Map<String, String> rawPropertyValues,
|
||||
final Function<String, String> effectiveValueResolver, final String componentDescription, final ControllerServiceFactory controllerServiceFactory) {
|
||||
// Create a copy of the configuration so that it can be manipulated. Use LinkedHashMap to preserve order
|
||||
this.properties = new LinkedHashMap<>(configuration);
|
||||
this.effectiveProperties = new LinkedHashMap<>(effectivePropertyValues);
|
||||
this.rawProperties = new LinkedHashMap<>(rawPropertyValues);
|
||||
this.effectiveValueResolver = effectiveValueResolver;
|
||||
this.componentDescription = componentDescription;
|
||||
this.controllerServiceFactory = controllerServiceFactory;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean renameProperty(final String propertyName, final String newName) {
|
||||
if (!properties.containsKey(propertyName)) {
|
||||
if (!effectiveProperties.containsKey(propertyName)) {
|
||||
logger.debug("Will not rename property [{}] for [{}] because the property is not known", propertyName, componentDescription);
|
||||
return false;
|
||||
}
|
||||
|
||||
final String propertyValue = properties.remove(propertyName);
|
||||
properties.put(newName, propertyValue);
|
||||
final String effectivePropertyValue = effectiveProperties.remove(propertyName);
|
||||
effectiveProperties.put(newName, effectivePropertyValue);
|
||||
|
||||
final String rawPropertyValue = rawProperties.remove(propertyName);
|
||||
rawProperties.put(newName, rawPropertyValue);
|
||||
|
||||
modified = true;
|
||||
logger.info("Renamed property [{}] to [{}] for [{}]", propertyName, newName, componentDescription);
|
||||
|
||||
|
@ -57,12 +71,13 @@ public class StandardPropertyConfiguration implements PropertyConfiguration {
|
|||
|
||||
@Override
|
||||
public boolean removeProperty(final String propertyName) {
|
||||
if (!properties.containsKey(propertyName)) {
|
||||
if (!effectiveProperties.containsKey(propertyName)) {
|
||||
logger.debug("Will not remove property [{}] from [{}] because the property is not known", propertyName, componentDescription);
|
||||
return false;
|
||||
}
|
||||
|
||||
properties.remove(propertyName);
|
||||
effectiveProperties.remove(propertyName);
|
||||
rawProperties.remove(propertyName);
|
||||
modified = true;
|
||||
logger.info("Removed property [{}] from [{}]", propertyName, componentDescription);
|
||||
|
||||
|
@ -71,22 +86,26 @@ public class StandardPropertyConfiguration implements PropertyConfiguration {
|
|||
|
||||
@Override
|
||||
public boolean hasProperty(final String propertyName) {
|
||||
return properties.containsKey(propertyName);
|
||||
return effectiveProperties.containsKey(propertyName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isPropertySet(final String propertyName) {
|
||||
return properties.get(propertyName) != null;
|
||||
// Use Effective Properties here because the value may be set to #{MY_PARAM} but if parameter MY_PARAM is not set, the property should be considered unset.
|
||||
return effectiveProperties.get(propertyName) != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProperty(final String propertyName, final String propertyValue) {
|
||||
final String previousValue = properties.put(propertyName, propertyValue);
|
||||
final String previousValue = rawProperties.put(propertyName, propertyValue);
|
||||
if (Objects.equals(previousValue, propertyValue)) {
|
||||
logger.debug("Will not update property [{}] for [{}] because the proposed value and the current value are the same", propertyName, componentDescription);
|
||||
return;
|
||||
}
|
||||
|
||||
final String effectiveValue = effectiveValueResolver.apply(propertyValue);
|
||||
effectiveProperties.put(propertyName, effectiveValue);
|
||||
|
||||
modified = true;
|
||||
if (previousValue == null) {
|
||||
logger.info("Updated property [{}] for [{}], which was previously unset", propertyName, componentDescription);
|
||||
|
@ -97,15 +116,40 @@ public class StandardPropertyConfiguration implements PropertyConfiguration {

    @Override
    public Optional<String> getPropertyValue(final String propertyName) {
        return Optional.ofNullable(properties.get(propertyName));
        return Optional.ofNullable(effectiveProperties.get(propertyName));
    }

    @Override
    public Optional<String> getRawPropertyValue(final String propertyName) {
        return Optional.ofNullable(rawProperties.get(propertyName));
    }

    @Override
    public Map<String, String> getProperties() {
        return Collections.unmodifiableMap(properties);
        return Collections.unmodifiableMap(effectiveProperties);
    }

    @Override
    public Map<String, String> getRawProperties() {
        return Collections.unmodifiableMap(rawProperties);
    }

    @Override
    public String createControllerService(final String implementationClassName, final Map<String, String> serviceProperties) {
        final ControllerServiceCreationDetails creationDetails = controllerServiceFactory.getCreationDetails(implementationClassName, serviceProperties);
        if (creationDetails.creationState() == ControllerServiceCreationDetails.CreationState.SERVICE_TO_BE_CREATED) {
            modified = true;
            createdServices.add(creationDetails);
        }

        return creationDetails.serviceIdentifier();
    }

    public boolean isModified() {
        return modified;
    }

    public List<ControllerServiceCreationDetails> getCreatedServices() {
        return createdServices;
    }
}
@ -0,0 +1,161 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.nifi.migration;
|
||||
|
||||
import org.apache.nifi.bundle.Bundle;
|
||||
import org.apache.nifi.bundle.BundleCoordinate;
|
||||
import org.apache.nifi.bundle.BundleDetails;
|
||||
import org.apache.nifi.controller.ComponentNode;
|
||||
import org.apache.nifi.controller.flow.FlowManager;
|
||||
import org.apache.nifi.controller.service.ControllerServiceProvider;
|
||||
import org.apache.nifi.nar.ExtensionManager;
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class TestStandardControllerServiceFactory {
|
||||
private static final String VERSION_2 = "2.0.0";
|
||||
private static final String FRAMEWORK_VERSION = "8.7.6";
|
||||
private static final String GROUP_ID = "org.apache.nifi";
|
||||
private static final String LONE_BUNDLE = "lone-bundle";
|
||||
private static final String IMPL_CLASS = "org.apache.nifi.auth.AuthorizerService";
|
||||
|
||||
private StandardControllerServiceFactory factory;
|
||||
private List<Bundle> bundles;
|
||||
private ComponentNode creator;
|
||||
private ControllerServiceProvider serviceProvider;
|
||||
|
||||
@BeforeEach
|
||||
public void setup() {
|
||||
final ExtensionManager extensionManager = mock(ExtensionManager.class);
|
||||
final FlowManager flowManager = mock(FlowManager.class);
|
||||
creator = mock(ComponentNode.class);
|
||||
|
||||
bundles = new ArrayList<>();
|
||||
bundles.add(createBundle(LONE_BUNDLE, VERSION_2));
|
||||
when(extensionManager.getBundles(IMPL_CLASS)).thenAnswer(invocation -> bundles);
|
||||
|
||||
serviceProvider = mock(ControllerServiceProvider.class);
|
||||
|
||||
final Bundle frameworkBundle = createBundle("framework-nar", FRAMEWORK_VERSION);
|
||||
factory = new StandardControllerServiceFactory(extensionManager, flowManager, serviceProvider, creator) {
|
||||
@Override
|
||||
protected Bundle getFrameworkBundle() {
|
||||
return frameworkBundle;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBundleDetermination() {
|
||||
final Map<String, String> serviceProperties = Map.of("PropertyA", "ValueA");
|
||||
final ControllerServiceCreationDetails details = factory.getCreationDetails(IMPL_CLASS, serviceProperties);
|
||||
assertNotNull(details);
|
||||
assertEquals(IMPL_CLASS, details.type());
|
||||
assertEquals(serviceProperties, details.serviceProperties());
|
||||
|
||||
// Test lone bundle
|
||||
final BundleCoordinate coordinate = details.serviceBundleCoordinate();
|
||||
assertEquals("%s:%s:%s".formatted(GROUP_ID, LONE_BUNDLE, VERSION_2), coordinate.getCoordinate());
|
||||
|
||||
// Test no bundles
|
||||
bundles.clear();
|
||||
Assertions.assertThrows(IllegalArgumentException.class, () -> factory.getCreationDetails(IMPL_CLASS, serviceProperties));
|
||||
|
||||
// Test matching creator bundle
|
||||
final BundleCoordinate coordinateA = createCoordinate("bundle-A", VERSION_2);
|
||||
final BundleCoordinate coordinateB = createCoordinate("bundle-B", VERSION_2);
|
||||
when(creator.getBundleCoordinate()).thenReturn(coordinateB);
|
||||
bundles.add(createBundle(coordinateA));
|
||||
bundles.add(createBundle(coordinateB));
|
||||
assertEquals(coordinateB, factory.getCreationDetails(IMPL_CLASS, serviceProperties).serviceBundleCoordinate());
|
||||
|
||||
// Test matching creator version with two options
|
||||
final BundleCoordinate coordinateC = createCoordinate("bundle-C", VERSION_2);
|
||||
when(creator.getBundleCoordinate()).thenReturn(coordinateC);
|
||||
Assertions.assertThrows(IllegalArgumentException.class, () -> factory.getCreationDetails(IMPL_CLASS, serviceProperties));
|
||||
|
||||
// Test matching creator version with only 1 option
|
||||
bundles.remove(createBundle(coordinateB));
|
||||
assertEquals(coordinateA, factory.getCreationDetails(IMPL_CLASS, serviceProperties).serviceBundleCoordinate());
|
||||
|
||||
bundles.clear();
|
||||
final BundleCoordinate frameworkVersionCoordinate = createCoordinate("bundle-X", FRAMEWORK_VERSION);
|
||||
bundles.add(createBundle(frameworkVersionCoordinate));
|
||||
assertEquals(frameworkVersionCoordinate, factory.getCreationDetails(IMPL_CLASS, serviceProperties).serviceBundleCoordinate());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testServiceIdDeterministic() {
|
||||
final Map<String, String> serviceProperties = Map.of("PropertyA", "ValueA");
|
||||
final String initialServiceId = factory.determineServiceId(IMPL_CLASS, serviceProperties);
|
||||
assertNotNull(initialServiceId);
|
||||
|
||||
// Create the service several times, ensuring that the same ID is returned each time.
|
||||
for (int i=0; i < 5; i++) {
|
||||
assertEquals(initialServiceId, factory.determineServiceId(IMPL_CLASS, serviceProperties));
|
||||
}
|
||||
|
||||
// Service ID should change if the component's group changes
|
||||
when(creator.getProcessGroupIdentifier()).thenReturn("new-id");
|
||||
final String secondGroupId = factory.determineServiceId(IMPL_CLASS, serviceProperties);
|
||||
assertNotNull(secondGroupId);
|
||||
|
||||
// Ensure that with the same parameters we keep getting the same value
|
||||
for (int i=0; i < 5; i++) {
|
||||
assertEquals(secondGroupId, factory.determineServiceId(IMPL_CLASS, serviceProperties));
|
||||
}
|
||||
|
||||
final String thirdId = factory.determineServiceId(IMPL_CLASS, Map.of());
|
||||
assertNotNull(thirdId);
|
||||
|
||||
final String fourthId = factory.determineServiceId(IMPL_CLASS, Map.of("Another", "Value"));
|
||||
assertNotNull(fourthId);
|
||||
|
||||
// Assert all IDs are unique
|
||||
assertEquals(4, Set.of(initialServiceId, secondGroupId, thirdId, fourthId).size());
|
||||
}
|
||||
|
||||
|
||||
private BundleCoordinate createCoordinate(final String artifactId, final String version) {
|
||||
return new BundleCoordinate(GROUP_ID, artifactId, version);
|
||||
}
|
||||
|
||||
private Bundle createBundle(final BundleCoordinate coordinate) {
|
||||
final BundleDetails details = new BundleDetails.Builder()
|
||||
.coordinate(coordinate)
|
||||
.workingDir(new File("target/work"))
|
||||
.build();
|
||||
return new Bundle(details, getClass().getClassLoader());
|
||||
}
|
||||
|
||||
private Bundle createBundle(final String artifactId, final String version) {
|
||||
return createBundle(createCoordinate(artifactId, version));
|
||||
}
|
||||
}
|
|
@ -17,6 +17,7 @@

package org.apache.nifi.migration;

import org.apache.nifi.controller.service.ControllerServiceNode;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

@ -41,9 +42,22 @@ public class TestStandardPropertyConfiguration {
        originalProperties.put("b", "B");
        originalProperties.put("c", null);

        config = new StandardPropertyConfiguration(originalProperties, "Test Component");
        final ControllerServiceFactory controllerServiceFactory = new ControllerServiceFactory() {
            @Override
            public ControllerServiceCreationDetails getCreationDetails(final String implementationClassName, final Map<String, String> propertyValues) {
                return new ControllerServiceCreationDetails("id", implementationClassName, null, propertyValues, ControllerServiceCreationDetails.CreationState.SERVICE_TO_BE_CREATED);
            }

            @Override
            public ControllerServiceNode create(final ControllerServiceCreationDetails creationDetails) {
                return null;
            }
        };

        config = new StandardPropertyConfiguration(originalProperties, originalProperties, raw -> raw, "Test Component", controllerServiceFactory);
    }

    // TODO: Test Raw vs. Effective values

    @Test
    public void testGetOperations() {
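The TODO above could be covered with a test along these lines; this is a sketch only, assuming the stub factory from setup() is promoted to a field and that Optional and assertEquals are already imported in this class. The "#{token}" resolver is a stand-in for NiFi's real parameter substitution:

    @Test
    public void testRawAndEffectiveValues() {
        final StandardPropertyConfiguration parameterized = new StandardPropertyConfiguration(
            Map.of("x", "resolved"), Map.of("x", "#{token}"),
            raw -> "#{token}".equals(raw) ? "resolved" : raw,
            "Test Component", controllerServiceFactory);

        assertEquals(Optional.of("#{token}"), parameterized.getRawPropertyValue("x"));
        assertEquals(Optional.of("resolved"), parameterized.getPropertyValue("x"));

        parameterized.setProperty("y", "#{token}");
        assertEquals(Optional.of("#{token}"), parameterized.getRawPropertyValue("y"));
        assertEquals(Optional.of("resolved"), parameterized.getPropertyValue("y"));
    }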
@ -652,6 +652,18 @@ public abstract class AbstractComponentNode implements ComponentNode {
        return getPropertyValues((descriptor, config) -> getConfigValue(config, isResolveParameter(descriptor, config)));
    }

    /**
     * Converts from a Map of PropertyDescriptor to value, to a Map of property name to value
     *
     * @param propertyValues the property values to convert
     * @return a Map whose keys are the names of the properties instead of descriptors
     */
    public Map<String, String> toPropertyNameMap(final Map<PropertyDescriptor, String> propertyValues) {
        final Map<String, String> converted = new HashMap<>();
        propertyValues.forEach((key, value) -> converted.put(key.getName(), value));
        return converted;
    }

    private Map<PropertyDescriptor, String> getPropertyValues(final BiFunction<PropertyDescriptor, PropertyConfiguration, String> valueFunction) {
        try (final NarCloseable narCloseable = NarCloseable.withComponentNarLoader(extensionManager, getComponent().getClass(), getIdentifier())) {
            final List<PropertyDescriptor> supported = getComponent().getPropertyDescriptors();

@ -691,6 +703,21 @@ public abstract class AbstractComponentNode implements ComponentNode {
        return value;
    }

    protected String mapRawValueToEffectiveValue(final String rawValue) {
        if (rawValue == null) {
            return null;
        }

        final ParameterLookup parameterLookup = getParameterLookup();
        if (parameterLookup == null) {
            return rawValue;
        }

        final ParameterTokenList parameterTokenList = new ExpressionLanguageAgnosticParameterParser().parseTokens(rawValue);
        final String effectiveValue = parameterTokenList.substitute(parameterLookup);
        return effectiveValue;
    }

    @Override
    public String getRawPropertyValue(final PropertyDescriptor property) {
        return getProperty(property).getRawValue();
@ -27,6 +27,7 @@ import org.apache.nifi.controller.scheduling.SchedulingAgent;
import org.apache.nifi.controller.service.ControllerServiceProvider;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.logging.LogLevel;
import org.apache.nifi.migration.ControllerServiceFactory;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Processor;

@ -297,6 +298,6 @@ public abstract class ProcessorNode extends AbstractComponentNode implements Con

    public abstract void notifyPrimaryNodeChanged(PrimaryNodeState primaryNodeState, LifecycleState lifecycleState);

    public abstract void migrateConfiguration(ProcessContext context);
    public abstract void migrateConfiguration(ControllerServiceFactory serviceFactory);

}
@ -21,6 +21,7 @@ import org.apache.nifi.components.ConfigVerificationResult;
import org.apache.nifi.controller.scheduling.LifecycleState;
import org.apache.nifi.controller.service.ControllerServiceNode;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.migration.ControllerServiceFactory;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.reporting.ReportingContext;
import org.apache.nifi.reporting.ReportingTask;

@ -137,5 +138,5 @@ public interface ReportingTaskNode extends ComponentNode {

    void notifyPrimaryNodeChanged(PrimaryNodeState primaryNodeState, LifecycleState lifecycleState);

    void migrateConfiguration(ConfigurationContext context);
    void migrateConfiguration(ControllerServiceFactory controllerServiceFactory);
}
@ -28,6 +28,7 @@ import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.logging.GroupedComponent;
import org.apache.nifi.logging.LogLevel;
import org.apache.nifi.migration.ControllerServiceFactory;
import org.apache.nifi.nar.ExtensionManager;

import java.util.List;

@ -256,5 +257,6 @@ public interface ControllerServiceNode extends ComponentNode, VersionedComponent

    void notifyPrimaryNodeChanged(PrimaryNodeState primaryNodeState);

    void migrateConfiguration(ConfigurationContext context);
    void migrateConfiguration(ControllerServiceFactory serviceFactory);

}
@ -20,13 +20,13 @@ package org.apache.nifi.groups;
|
|||
import org.apache.nifi.connectable.Connectable;
|
||||
import org.apache.nifi.connectable.Port;
|
||||
import org.apache.nifi.controller.ProcessorNode;
|
||||
import org.apache.nifi.controller.ReportingTaskNode;
|
||||
import org.apache.nifi.controller.service.ControllerServiceNode;
|
||||
import org.apache.nifi.controller.service.ControllerServiceProvider;
|
||||
import org.apache.nifi.flow.ExecutionEngine;
|
||||
import org.apache.nifi.flow.ScheduledState;
|
||||
import org.apache.nifi.registry.flow.mapping.VersionedComponentStateLookup;
|
||||
import org.apache.nifi.remote.RemoteGroupPort;
|
||||
import org.apache.nifi.controller.ReportingTaskNode;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -137,60 +137,35 @@ public abstract class AbstractComponentScheduler implements ComponentScheduler {
|
|||
private ScheduledState getScheduledState(final Connectable component) {
|
||||
// Use the State Lookup to get the state, if possible. If, for some reason, it doesn't
|
||||
// provide us a state (which should never happen) just fall back to the component's scheduled state.
|
||||
switch (component.getConnectableType()) {
|
||||
case INPUT_PORT:
|
||||
case OUTPUT_PORT:
|
||||
case REMOTE_INPUT_PORT:
|
||||
case REMOTE_OUTPUT_PORT:
|
||||
return stateLookup.getState((Port) component);
|
||||
case PROCESSOR:
|
||||
return stateLookup.getState((ProcessorNode) component);
|
||||
case FUNNEL:
|
||||
return ScheduledState.RUNNING;
|
||||
}
|
||||
return switch (component.getConnectableType()) {
|
||||
case INPUT_PORT, OUTPUT_PORT, REMOTE_INPUT_PORT, REMOTE_OUTPUT_PORT -> stateLookup.getState((Port) component);
|
||||
case PROCESSOR -> stateLookup.getState((ProcessorNode) component);
|
||||
case FUNNEL -> ScheduledState.RUNNING;
|
||||
default -> switch (component.getScheduledState()) {
|
||||
case DISABLED -> ScheduledState.DISABLED;
|
||||
case RUN_ONCE, STOPPED, STOPPING -> ScheduledState.ENABLED;
|
||||
default -> ScheduledState.RUNNING;
|
||||
};
|
||||
};
|
||||
|
||||
switch (component.getScheduledState()) {
|
||||
case DISABLED:
|
||||
return ScheduledState.DISABLED;
|
||||
case RUN_ONCE:
|
||||
case STOPPED:
|
||||
case STOPPING:
|
||||
return ScheduledState.ENABLED;
|
||||
case RUNNING:
|
||||
case STARTING:
|
||||
default:
|
||||
return ScheduledState.RUNNING;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void enable(final Connectable component) {
|
||||
final ProcessGroup group = component.getProcessGroup();
|
||||
switch (component.getConnectableType()) {
|
||||
case INPUT_PORT:
|
||||
group.enableInputPort((Port) component);
|
||||
break;
|
||||
case OUTPUT_PORT:
|
||||
group.enableOutputPort((Port) component);
|
||||
break;
|
||||
case PROCESSOR:
|
||||
group.enableProcessor((ProcessorNode) component);
|
||||
break;
|
||||
case INPUT_PORT -> group.enableInputPort((Port) component);
|
||||
case OUTPUT_PORT -> group.enableOutputPort((Port) component);
|
||||
case PROCESSOR -> group.enableProcessor((ProcessorNode) component);
|
||||
}
|
||||
}
|
||||
|
||||
private void disable(final Connectable component) {
|
||||
final ProcessGroup group = component.getProcessGroup();
|
||||
switch (component.getConnectableType()) {
|
||||
case INPUT_PORT:
|
||||
group.disableInputPort((Port) component);
|
||||
break;
|
||||
case OUTPUT_PORT:
|
||||
group.disableOutputPort((Port) component);
|
||||
break;
|
||||
case PROCESSOR:
|
||||
group.disableProcessor((ProcessorNode) component);
|
||||
break;
|
||||
case INPUT_PORT -> group.disableInputPort((Port) component);
|
||||
case OUTPUT_PORT -> group.disableOutputPort((Port) component);
|
||||
case PROCESSOR -> group.disableProcessor((ProcessorNode) component);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -210,20 +185,13 @@ public abstract class AbstractComponentScheduler implements ComponentScheduler {
|
|||
public void stopComponent(final Connectable component) {
|
||||
final ProcessGroup processGroup = component.getProcessGroup();
|
||||
switch (component.getConnectableType()) {
|
||||
case INPUT_PORT:
|
||||
processGroup.stopInputPort((Port) component);
|
||||
break;
|
||||
case OUTPUT_PORT:
|
||||
processGroup.stopOutputPort((Port) component);
|
||||
break;
|
||||
case PROCESSOR:
|
||||
processGroup.stopProcessor((ProcessorNode) component);
|
||||
break;
|
||||
case REMOTE_INPUT_PORT:
|
||||
case REMOTE_OUTPUT_PORT:
|
||||
case INPUT_PORT -> processGroup.stopInputPort((Port) component);
|
||||
case OUTPUT_PORT -> processGroup.stopOutputPort((Port) component);
|
||||
case PROCESSOR -> processGroup.stopProcessor((ProcessorNode) component);
|
||||
case REMOTE_INPUT_PORT, REMOTE_OUTPUT_PORT -> {
|
||||
final RemoteGroupPort port = (RemoteGroupPort) component;
|
||||
port.getRemoteProcessGroup().stopTransmitting(port);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,32 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.nifi.migration;

import org.apache.nifi.bundle.BundleCoordinate;

import java.util.Map;

public record ControllerServiceCreationDetails(String serviceIdentifier, String type, BundleCoordinate serviceBundleCoordinate,
                                               Map<String, String> serviceProperties, CreationState creationState) {

    enum CreationState {
        SERVICE_ALREADY_EXISTS,

        SERVICE_TO_BE_CREATED;
    }
}
@ -0,0 +1,44 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.nifi.migration;

import org.apache.nifi.controller.service.ControllerServiceNode;

import java.util.Map;

public interface ControllerServiceFactory {

    /**
     * Determines whether or not a Controller Service exists in the proper scope with the given implementation and property values.
     * Provides all of the details that are necessary in order to create or reference the Controller Service with the given implementation
     * and property values
     *
     * @param implementationClassName the fully qualified classname of the Controller Service to create or reference
     * @param propertyValues the property values that should be associated with the Controller Service
     * @return the details necessary in order to reference or create the Controller Service
     */
    ControllerServiceCreationDetails getCreationDetails(String implementationClassName, Map<String, String> propertyValues);

    /**
     * Creates a Controller Service that is described by the given details
     *
     * @param creationDetails the details of the service to create
     * @return the newly created Controller Service
     */
    ControllerServiceNode create(ControllerServiceCreationDetails creationDetails);
}
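A brief, illustrative caller of this contract, mirroring the pattern the migration classes above follow; the service class name and property are examples only, and CreationState is package-private, so real callers live in the same package:

    final ControllerServiceCreationDetails details = serviceFactory.getCreationDetails(
        "org.apache.nifi.services.ExampleService", Map.of("Example Property", "value"));

    if (details.creationState() == ControllerServiceCreationDetails.CreationState.SERVICE_TO_BE_CREATED) {
        final ControllerServiceNode node = serviceFactory.create(details);
        // The new node is parented to the creator's Process Group (or added at controller level).
    }

    // Whether the service already existed or was just created, the identifier is stable and can be
    // stored in a component property value.
    final String serviceId = details.serviceIdentifier();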
@ -28,7 +28,6 @@ import org.apache.nifi.connectable.Connectable;
|
|||
import org.apache.nifi.connectable.Position;
|
||||
import org.apache.nifi.controller.AbstractComponentNode;
|
||||
import org.apache.nifi.controller.ComponentNode;
|
||||
import org.apache.nifi.controller.ConfigurationContext;
|
||||
import org.apache.nifi.controller.FlowAnalysisRuleNode;
|
||||
import org.apache.nifi.controller.FlowController;
|
||||
import org.apache.nifi.controller.MissingBundleException;
|
||||
|
@ -49,7 +48,6 @@ import org.apache.nifi.controller.inheritance.FlowInheritabilityCheck;
|
|||
import org.apache.nifi.controller.inheritance.MissingComponentsCheck;
|
||||
import org.apache.nifi.controller.reporting.ReportingTaskInstantiationException;
|
||||
import org.apache.nifi.controller.service.ControllerServiceNode;
|
||||
import org.apache.nifi.controller.service.StandardConfigurationContext;
|
||||
import org.apache.nifi.encrypt.PropertyEncryptor;
|
||||
import org.apache.nifi.flow.Bundle;
|
||||
import org.apache.nifi.flow.ExecutionEngine;
|
||||
|
@ -72,6 +70,8 @@ import org.apache.nifi.groups.ComponentScheduler;
|
|||
import org.apache.nifi.groups.FlowSynchronizationOptions;
|
||||
import org.apache.nifi.groups.ProcessGroup;
|
||||
import org.apache.nifi.logging.LogLevel;
|
||||
import org.apache.nifi.migration.ControllerServiceFactory;
|
||||
import org.apache.nifi.migration.StandardControllerServiceFactory;
|
||||
import org.apache.nifi.nar.ExtensionManager;
|
||||
import org.apache.nifi.parameter.Parameter;
|
||||
import org.apache.nifi.parameter.ParameterContext;
|
||||
|
@@ -597,8 +597,9 @@ public class VersionedFlowSynchronizer implements FlowSynchronizer {
final ReportingTaskNode taskNode = controller.createReportingTask(reportingTask.getType(), reportingTask.getInstanceIdentifier(), coordinate, false);
updateReportingTask(taskNode, reportingTask, controller);

final ConfigurationContext configurationContext = new StandardConfigurationContext(taskNode, controller.getControllerServiceProvider(), taskNode.getSchedulingPeriod());
taskNode.migrateConfiguration(configurationContext);
final ControllerServiceFactory serviceFactory = new StandardControllerServiceFactory(controller.getExtensionManager(), controller.getFlowManager(),
controller.getControllerServiceProvider(), taskNode);
taskNode.migrateConfiguration(serviceFactory);
}

private void updateReportingTask(final ReportingTaskNode taskNode, final VersionedReportingTask reportingTask, final FlowController controller) {
@@ -639,11 +640,8 @@ public class VersionedFlowSynchronizer implements FlowSynchronizer {
}
}

private void inheritFlowAnalysisRules(
final FlowController controller,
final VersionedDataflow dataflow,
final AffectedComponentSet affectedComponentSet
) throws FlowAnalysisRuleInstantiationException {
private void inheritFlowAnalysisRules(final FlowController controller, final VersionedDataflow dataflow, final AffectedComponentSet affectedComponentSet)
throws FlowAnalysisRuleInstantiationException {
// Guard state in order to be able to read flow.json from before adding the flow analysis rules
if (dataflow.getFlowAnalysisRules() == null) {
return;
@@ -915,8 +913,9 @@ public class VersionedFlowSynchronizer implements FlowSynchronizer {
}

for (final ControllerServiceNode service : controllerServicesAdded) {
final ConfigurationContext configurationContext = new StandardConfigurationContext(service, controller.getControllerServiceProvider(), null);
service.migrateConfiguration(configurationContext);
final ControllerServiceFactory serviceFactory = new StandardControllerServiceFactory(controller.getExtensionManager(), controller.getFlowManager(),
controller.getControllerServiceProvider(), service);
service.migrateConfiguration(serviceFactory);
}

for (final VersionedControllerService versionedControllerService : controllerServices) {
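
The reporting-task hunk and the controller-service hunk above wire migration the same way: construct a StandardControllerServiceFactory scoped to the node being migrated and hand it to migrateConfiguration. A sketch of that pattern in isolation (not code from the commit; the class and method names of the sketch are invented, the constructor arguments are the ones shown in the hunks, and the ReportingTaskNode import path is assumed):

    import org.apache.nifi.controller.FlowController;
    import org.apache.nifi.controller.ReportingTaskNode;
    import org.apache.nifi.migration.ControllerServiceFactory;
    import org.apache.nifi.migration.StandardControllerServiceFactory;

    class MigrationWiringSketch {

        // Scoping the factory to the node being migrated means any Controller Services created during
        // migration are placed relative to that node, which is consistent with the group placement the
        // system test later in this diff asserts.
        static void migrate(final FlowController controller, final ReportingTaskNode taskNode) {
            final ControllerServiceFactory serviceFactory = new StandardControllerServiceFactory(
                    controller.getExtensionManager(), controller.getFlowManager(),
                    controller.getControllerServiceProvider(), taskNode);
            taskNode.migrateConfiguration(serviceFactory);
        }
    }

The ControllerServiceNode path in the hunk directly above is identical apart from the node type.
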
@@ -975,11 +974,10 @@ public class VersionedFlowSynchronizer implements FlowSynchronizer {

private void inheritAuthorizations(final DataFlow existingFlow, final DataFlow proposedFlow, final FlowController controller) {
final Authorizer authorizer = controller.getAuthorizer();
if (!(authorizer instanceof ManagedAuthorizer)) {
if (!(authorizer instanceof final ManagedAuthorizer managedAuthorizer)) {
return;
}

final ManagedAuthorizer managedAuthorizer = (ManagedAuthorizer) authorizer;
final String proposedAuthFingerprint = proposedFlow.getAuthorizerFingerprint() == null ? "" : new String(proposedFlow.getAuthorizerFingerprint(), StandardCharsets.UTF_8);

final FlowInheritabilityCheck authorizerCheck = new AuthorizerCheck();
@@ -1229,17 +1227,9 @@ public class VersionedFlowSynchronizer implements FlowSynchronizer {
}

switch (component.getConnectableType()) {
case PROCESSOR:
flowController.startProcessor(component.getProcessGroupIdentifier(), component.getIdentifier());
break;
case INPUT_PORT:
case OUTPUT_PORT:
flowController.startConnectable(component);
break;
case REMOTE_INPUT_PORT:
case REMOTE_OUTPUT_PORT:
flowController.startTransmitting((RemoteGroupPort) component);
break;
case PROCESSOR -> flowController.startProcessor(component.getProcessGroupIdentifier(), component.getIdentifier());
case INPUT_PORT, OUTPUT_PORT -> flowController.startConnectable(component);
case REMOTE_INPUT_PORT, REMOTE_OUTPUT_PORT -> flowController.startTransmitting((RemoteGroupPort) component);
}
}
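
The two hunks above (the ManagedAuthorizer check and the ConnectableType switch) appear to be language-level modernizations rather than behavior changes: pattern-matching instanceof and arrow-form switch. A standalone illustration with invented types, unrelated to NiFi:

    class ModernSyntaxSketch {

        enum PortKind { PROCESSOR, INPUT_PORT, OUTPUT_PORT, REMOTE_PORT }

        // Arrow-form switch: each label runs a single statement or expression and never falls through,
        // which is why the rewritten ConnectableType switch no longer needs break statements.
        static String describe(final PortKind kind) {
            return switch (kind) {
                case PROCESSOR -> "component that runs processing logic";
                case INPUT_PORT, OUTPUT_PORT -> "port that crosses a group boundary";
                case REMOTE_PORT -> "port that transmits to a remote instance";
            };
        }

        // Pattern-matching instanceof: the type test and the cast collapse into one condition that
        // binds a new variable, mirroring the ManagedAuthorizer rewrite.
        static int lengthOrZero(final Object value) {
            if (!(value instanceof final String text)) {
                return 0;
            }
            return text.length();
        }
    }
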
@@ -18,6 +18,8 @@
package org.apache.nifi.processors.tests.system;

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.cs.tests.system.MigrationService;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.migration.RelationshipConfiguration;

@@ -29,6 +31,7 @@ import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
@@ -54,6 +57,12 @@ public class MigrateProperties extends AbstractProcessor {
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();

static PropertyDescriptor SERVICE = new PropertyDescriptor.Builder()
.name("Service")
.required(false)
.identifiesControllerService(ControllerService.class)
.build();

static Relationship REL_ODD = new Relationship.Builder().name("odd").build();
static Relationship REL_EVEN = new Relationship.Builder().name("even").build();
static Relationship REL_BROKEN = new Relationship.Builder().name("broken").build();

@@ -62,7 +71,8 @@ public class MigrateProperties extends AbstractProcessor {
private static final List<PropertyDescriptor> properties = List.of(
INGEST,
ATTRIBUTE_NAME,
ATTRIBUTE_VALUE
ATTRIBUTE_VALUE,
SERVICE
);

private final AtomicLong counter = new AtomicLong(0L);
@@ -85,7 +95,14 @@ public class MigrateProperties extends AbstractProcessor {
config.renameProperty("attr-value", ATTRIBUTE_VALUE.getName());
config.renameProperty("never-existed", "still-doesnt-exist");
config.setProperty("New Property", config.getPropertyValue(INGEST).orElse("New Value"));
final String ignoredValue = config.getPropertyValue("ignored").orElse(null);
config.removeProperty("ignored");

// If the 'ignored' value was set, create a new Controller Service whose Start value is set to that value.
if (ignoredValue != null && ignoredValue.matches("\\d+")) {
final String serviceId = config.createControllerService(MigrationService.class.getName(), Map.of("Start", ignoredValue));
config.setProperty(SERVICE, serviceId);
}
}

@Override
@@ -23,6 +23,7 @@ import org.apache.nifi.toolkit.cli.impl.client.nifi.NiFiClientException;
import org.apache.nifi.web.api.dto.ProcessorConfigDTO;
import org.apache.nifi.web.api.entity.ConnectionEntity;
import org.apache.nifi.web.api.entity.ControllerServiceEntity;
import org.apache.nifi.web.api.entity.ProcessGroupEntity;
import org.apache.nifi.web.api.entity.ProcessorEntity;
import org.apache.nifi.web.api.entity.ReportingTaskEntity;
import org.junit.jupiter.api.AfterEach;

@@ -37,6 +38,8 @@ import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;

@@ -46,6 +49,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class PropertyMigrationIT extends NiFiSystemIT {
private static final String SERVICE = "Service";

@AfterEach
public void restoreNars() {
@@ -57,6 +61,87 @@ public class PropertyMigrationIT extends NiFiSystemIT {
getNiFiInstance().start(true);
}

@Test
public void testControllerServiceCreated() throws NiFiClientException, IOException {
final ProcessGroupEntity group1 = getClientUtil().createProcessGroup("Group 1", "root");
final ProcessGroupEntity group2 = getClientUtil().createProcessGroup("Group 2", "root");

final ProcessorEntity proc1 = getClientUtil().createProcessor("MigrateProperties", group1.getId());
final ProcessorEntity proc2 = getClientUtil().createProcessor("MigrateProperties", group1.getId());
final ProcessorEntity proc3 = getClientUtil().createProcessor("MigrateProperties", group1.getId());
final ProcessorEntity proc4 = getClientUtil().createProcessor("MigrateProperties", group2.getId());

// Update proc1 and proc2 with the same values.
// Set same values for proc4, which is in a different group.
final Map<String, String> proc1Properties = Map.of(
"attr-to-add", "greeting",
"attr-value", "Hi",
"ignored", "17"
);
getClientUtil().updateProcessorProperties(proc1, proc1Properties);
getClientUtil().updateProcessorProperties(proc2, proc1Properties);
getClientUtil().updateProcessorProperties(proc4, proc1Properties);

final Map<String, String> proc3Properties = new HashMap<>(proc1Properties);
proc3Properties.put("ignored", "41");
getClientUtil().updateProcessorProperties(proc3, proc3Properties);

// Stop NiFi, switch out the system-tests-extensions nar for the alternate-config-nar, and restart
getNiFiInstance().stop();
switchOutNars();
getNiFiInstance().start(true);

// Procs 1 and 2 should have the same value for the Controller Service.
// Procs 3 and 4 should each have different values
final ControllerServicesClient serviceClient = getNifiClient().getControllerServicesClient();

final Map<String, String> proc1UpdatedProps = getProperties(proc1);
final Map<String, String> proc2UpdatedProps = getProperties(proc2);
final Map<String, String> proc3UpdatedProps = getProperties(proc3);
final Map<String, String> proc4UpdatedProps = getProperties(proc4);

final Set<String> serviceIds = new HashSet<>();
for (final Map<String, String> propertiesMap : List.of(proc1UpdatedProps, proc2UpdatedProps, proc3UpdatedProps, proc4UpdatedProps)) {
final String serviceId = propertiesMap.get(SERVICE);
assertNotNull(serviceId);
serviceIds.add(serviceId);
}

// Should be 3 different services
assertEquals(3, serviceIds.size());

// Procs 1 and 2 should reference the same service.
assertEquals(proc1UpdatedProps.get(SERVICE), proc2UpdatedProps.get(SERVICE));

// Services for procs 1-3 should be in group 1
for (final String serviceId : List.of(proc1UpdatedProps.get(SERVICE), proc2UpdatedProps.get(SERVICE), proc3UpdatedProps.get(SERVICE))) {
assertEquals(group1.getId(), serviceClient.getControllerService(serviceId).getParentGroupId());
}

// Service for proc 4 should be in group 2
assertEquals(group2.getId(), serviceClient.getControllerService(proc4UpdatedProps.get(SERVICE)).getParentGroupId());

// Ensure that the service's properties were also migrated, since the processor mapped the "ignored" value to the old property name of the service.
final ControllerServiceEntity service1 = serviceClient.getControllerService(proc1UpdatedProps.get(SERVICE));
final Map<String, String> service1Props = service1.getComponent().getProperties();
assertEquals(Map.of("Initial Value", "17"), service1Props);
assertEquals(2, service1.getComponent().getReferencingComponents().size());

final ControllerServiceEntity service4 = serviceClient.getControllerService(proc4UpdatedProps.get(SERVICE));
final Map<String, String> service4Props = service4.getComponent().getProperties();
assertEquals(Map.of("Initial Value", "17"), service4Props);
assertEquals(1, service4.getComponent().getReferencingComponents().size());

final ControllerServiceEntity service3 = serviceClient.getControllerService(proc3UpdatedProps.get(SERVICE));
final Map<String, String> service3Props = service3.getComponent().getProperties();
assertEquals(Map.of("Initial Value", "41"), service3Props);
assertEquals(1, service3.getComponent().getReferencingComponents().size());
}

private Map<String, String> getProperties(final ProcessorEntity processor) throws NiFiClientException, IOException {
return getNifiClient().getProcessorClient().getProcessor(processor.getId()).getComponent().getConfig().getProperties();
}

@Test
public void testPropertyMigration() throws NiFiClientException, IOException {
final ProcessorEntity migrate = getClientUtil().createProcessor("MigrateProperties");
@@ -94,17 +179,18 @@ public class PropertyMigrationIT extends NiFiSystemIT {
// Stop NiFi, switch out the system-tests-extensions nar for the alternate-config-nar, and restart
getNiFiInstance().stop();
switchOutNars();

getNiFiInstance().start(true);

// Ensure that the Processor's config was properly updated
final ProcessorEntity updated = getNifiClient().getProcessorClient().getProcessor(migrate.getId());
final Map<String, String> updatedProperties = updated.getComponent().getConfig().getProperties();

final Map<String, String> expectedUpdatedProperties = Map.of("Ingest Data", "true",
"Attribute to add", "greeting",
"Attribute Value", "Hi",
"New Property", "true");
final Map<String, String> expectedUpdatedProperties = new HashMap<>();
expectedUpdatedProperties.put("Ingest Data", "true");
expectedUpdatedProperties.put("Attribute to add", "greeting");
expectedUpdatedProperties.put("Attribute Value", "Hi");
expectedUpdatedProperties.put("New Property", "true");
expectedUpdatedProperties.put("Service", null);
assertEquals(expectedUpdatedProperties, updatedProperties);

final ProcessorConfigDTO updatedConfig = updated.getComponent().getConfig();