NIFI-12613 Renamed asDescribedValue() to asAllowableValue()

This closes #8260

Signed-off-by: David Handermann <exceptionfactory@apache.org>
Authored by EndzeitBegins on 2024-01-17 19:05:41 +01:00, committed by exceptionfactory
parent 2a3a7d9379
commit 53bb995e4e
38 changed files with 88 additions and 70 deletions

View File

@ -135,16 +135,18 @@ public interface PropertyValue {
ResourceReferences asResources();
/**
* @param <E> the generic type of the enum used as allowable values
* @param enumType the class of the enum used as allowable values
* @return the DescribedValue enum entry whose value is the raw value of the
* <code>this</code>, or <code>null</code> if the value is not set.
* Throws an IllegalArgumentException if none of the enum entries correspond to the specified raw value.
* Returns the property value as one of the configured allowableValues, see {@link PropertyDescriptor.Builder#allowableValues(Class)}
* <p>
* The {@link Class#getEnumConstants()} of the provided enum are searched for an entry matching the value of <code>this</code>.
* In case an enum value is a {@link DescribedValue}, uses the defined {@link DescribedValue#getValue()} for comparison.
* Otherwise, the {@link Enum#name()} is used.
*
* @throws IllegalArgumentException if the value of <code>this</code>
* does not point to any of the entries of the specified enum type.
* @param <E> the generic type of the enum used as allowableValues
* @param enumType the class of the enum used as allowableValues
* @return the matching enum entry, or <code>null</code> if the value is not set.
* @throws IllegalArgumentException if no enum entry matching the value of <code>this</code> is found.
*/
<E extends Enum<E> & DescribedValue> E asDescribedValue(Class<E> enumType) throws IllegalArgumentException;
<E extends Enum<E>> E asAllowableValue(Class<E> enumType) throws IllegalArgumentException;
/**
* @return <code>true</code> if the user has configured a value, or if the
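To make the renamed contract concrete, a minimal usage sketch follows; the TransferMode enum, the TRANSFER_MODE descriptor, and the surrounding ProcessContext are illustrative assumptions rather than part of this change.

import org.apache.nifi.components.DescribedValue;

// Hypothetical enum whose getValue() supplies the raw property value.
enum TransferMode implements DescribedValue {
    BINARY("binary", "Binary", "Transfer content unchanged"),
    TEXT("text", "Text", "Transfer content as text");

    private final String value;
    private final String displayName;
    private final String description;

    TransferMode(final String value, final String displayName, final String description) {
        this.value = value;
        this.displayName = displayName;
        this.description = description;
    }

    @Override public String getValue() { return value; }
    @Override public String getDisplayName() { return displayName; }
    @Override public String getDescription() { return description; }
}

// A raw value of "binary" resolves to TransferMode.BINARY because the enum
// implements DescribedValue and "binary" matches getValue(); a plain enum
// (no DescribedValue) would instead be matched against Enum.name().
final TransferMode mode = context.getProperty(TRANSFER_MODE).asAllowableValue(TransferMode.class);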

View File

@ -237,13 +237,17 @@ public class StandardPropertyValue implements PropertyValue {
}
@Override
public <E extends Enum<E> & DescribedValue> E asDescribedValue(Class<E> enumType) throws IllegalArgumentException {
public <E extends Enum<E>> E asAllowableValue(Class<E> enumType) throws IllegalArgumentException {
if (rawValue == null) {
return null;
}
for (E enumConstant : enumType.getEnumConstants()) {
if (enumConstant.getValue().equals(rawValue)) {
if (enumConstant instanceof DescribedValue describedValue) {
if (describedValue.getValue().equals(rawValue)) {
return enumConstant;
}
} else if (enumConstant.name().equals(rawValue)) {
return enumConstant;
}
}
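Put differently, the implementation now falls back to Enum.name() when the enum does not implement DescribedValue. A short sketch of the three possible outcomes, assuming a hypothetical plain Mode enum and VALIDATION_MODE property:

// Plain enum without DescribedValue: entries are matched by Enum.name().
enum Mode { STRICT, LENIENT }

// raw value "STRICT" -> Mode.STRICT (matched via name())
// raw value null     -> null (property not set)
// raw value "foo"    -> IllegalArgumentException (no matching entry)
final Mode mode = context.getProperty(VALIDATION_MODE).asAllowableValue(Mode.class);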

View File

@ -25,7 +25,6 @@ import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.attribute.expression.language.Query;
import org.apache.nifi.attribute.expression.language.Query.Range;
import org.apache.nifi.attribute.expression.language.StandardPropertyValue;
import org.apache.nifi.components.DescribedValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.resource.ResourceContext;
@ -324,9 +323,9 @@ public class MockPropertyValue implements PropertyValue {
}
@Override
public <E extends Enum<E> & DescribedValue> E asDescribedValue(Class<E> enumType) throws IllegalArgumentException {
public <E extends Enum<E>> E asAllowableValue(Class<E> enumType) throws IllegalArgumentException {
ensureExpressionsEvaluated();
return stdPropValue.asDescribedValue(enumType);
return stdPropValue.asAllowableValue(enumType);
}
@Override

View File

@ -341,7 +341,7 @@ public class GetAsanaObject extends AbstractProcessor {
}
protected AsanaObjectFetcher createObjectFetcher(final ProcessContext context, AsanaClient client) {
final AsanaObjectType objectType = context.getProperty(PROP_ASANA_OBJECT_TYPE).asDescribedValue(AsanaObjectType.class);
final AsanaObjectType objectType = context.getProperty(PROP_ASANA_OBJECT_TYPE).asAllowableValue(AsanaObjectType.class);
final String projectName = context.getProperty(PROP_ASANA_PROJECT).getValue();
final String sectionName = context.getProperty(PROP_ASANA_SECTION).getValue();
final String teamName = context.getProperty(PROP_ASANA_TEAM_NAME).getValue();

View File

@ -76,7 +76,7 @@ public interface ClientSideEncryptionSupport {
default Collection<ValidationResult> validateClientSideEncryptionProperties(ValidationContext validationContext) {
final List<ValidationResult> validationResults = new ArrayList<>();
final ClientSideEncryptionMethod cseKeyType = validationContext.getProperty(CSE_KEY_TYPE).asDescribedValue(ClientSideEncryptionMethod.class);
final ClientSideEncryptionMethod cseKeyType = validationContext.getProperty(CSE_KEY_TYPE).asAllowableValue(ClientSideEncryptionMethod.class);
final String cseKeyId = validationContext.getProperty(CSE_KEY_ID).getValue();
final String cseLocalKey = validationContext.getProperty(CSE_LOCAL_KEY).getValue();
if (cseKeyType != ClientSideEncryptionMethod.NONE && StringUtils.isBlank(cseKeyId)) {
@ -114,7 +114,7 @@ public interface ClientSideEncryptionSupport {
}
default boolean isClientSideEncryptionEnabled(PropertyContext context) {
final ClientSideEncryptionMethod cseKeyType = context.getProperty(CSE_KEY_TYPE).asDescribedValue(ClientSideEncryptionMethod.class);
final ClientSideEncryptionMethod cseKeyType = context.getProperty(CSE_KEY_TYPE).asAllowableValue(ClientSideEncryptionMethod.class);
return cseKeyType != ClientSideEncryptionMethod.NONE;
}

View File

@ -490,7 +490,7 @@ public class ConsumeAzureEventHub extends AbstractSessionFactoryProcessor implem
final Long receiveTimeout = context.getProperty(RECEIVE_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS);
final Duration maxWaitTime = Duration.ofMillis(receiveTimeout);
final Integer maxBatchSize = context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger();
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asDescribedValue(AzureEventHubTransportType.class).asAmqpTransportType();
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asAllowableValue(AzureEventHubTransportType.class).asAmqpTransportType();
final EventProcessorClientBuilder eventProcessorClientBuilder = new EventProcessorClientBuilder()
.transportType(transportType)

View File

@ -360,7 +360,7 @@ public class GetAzureEventHub extends AbstractProcessor implements AzureEventHub
final String serviceBusEndpoint = context.getProperty(SERVICE_BUS_ENDPOINT).getValue();
final boolean useManagedIdentity = context.getProperty(USE_MANAGED_IDENTITY).asBoolean();
final String fullyQualifiedNamespace = String.format("%s%s", namespace, serviceBusEndpoint);
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asDescribedValue(AzureEventHubTransportType.class).asAmqpTransportType();
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asAllowableValue(AzureEventHubTransportType.class).asAmqpTransportType();
final EventHubClientBuilder eventHubClientBuilder = new EventHubClientBuilder();
eventHubClientBuilder.transportType(transportType);

View File

@ -187,7 +187,7 @@ public class PutAzureEventHub extends AbstractProcessor implements AzureEventHub
final String namespace = context.getProperty(NAMESPACE).getValue();
final String serviceBusEndpoint = context.getProperty(SERVICE_BUS_ENDPOINT).getValue();
final String eventHubName = context.getProperty(EVENT_HUB_NAME).getValue();
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asDescribedValue(AzureEventHubTransportType.class).asAmqpTransportType();
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asAllowableValue(AzureEventHubTransportType.class).asAmqpTransportType();
try {
final EventHubClientBuilder eventHubClientBuilder = new EventHubClientBuilder();

View File

@ -200,7 +200,7 @@ public class CopyAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 {
).orElse(sourceBlobName);
final boolean createContainer = context.getProperty(AzureStorageUtils.CREATE_CONTAINER).asBoolean();
final AzureStorageConflictResolutionStrategy conflictResolution = context.getProperty(AzureStorageUtils.CONFLICT_RESOLUTION).asDescribedValue(AzureStorageConflictResolutionStrategy.class);
final AzureStorageConflictResolutionStrategy conflictResolution = context.getProperty(AzureStorageUtils.CONFLICT_RESOLUTION).asAllowableValue(AzureStorageConflictResolutionStrategy.class);
final long startNanos = System.nanoTime();
try {

View File

@ -138,7 +138,7 @@ public class PutAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 impl
final String containerName = context.getProperty(AzureStorageUtils.CONTAINER).evaluateAttributeExpressions(flowFile).getValue();
final boolean createContainer = context.getProperty(AzureStorageUtils.CREATE_CONTAINER).asBoolean();
final String blobName = context.getProperty(BLOB_NAME).evaluateAttributeExpressions(flowFile).getValue();
final AzureStorageConflictResolutionStrategy conflictResolution = context.getProperty(AzureStorageUtils.CONFLICT_RESOLUTION).asDescribedValue(AzureStorageConflictResolutionStrategy.class);
final AzureStorageConflictResolutionStrategy conflictResolution = context.getProperty(AzureStorageUtils.CONFLICT_RESOLUTION).asAllowableValue(AzureStorageConflictResolutionStrategy.class);
final ResourceTransferSource resourceTransferSource = ResourceTransferSource.valueOf(context.getProperty(RESOURCE_TRANSFER_SOURCE).getValue());
long startNanos = System.nanoTime();

View File

@ -148,7 +148,7 @@ public class StandardKustoQueryService extends AbstractControllerService impleme
private ConnectionStringBuilder getConnectionStringBuilder(final ConfigurationContext context) {
final String clusterUrl = context.getProperty(CLUSTER_URI).getValue();
final String clientId = context.getProperty(APPLICATION_CLIENT_ID).getValue();
final KustoAuthenticationStrategy kustoAuthenticationStrategy = context.getProperty(AUTHENTICATION_STRATEGY).asDescribedValue(KustoAuthenticationStrategy.class);
final KustoAuthenticationStrategy kustoAuthenticationStrategy = context.getProperty(AUTHENTICATION_STRATEGY).asAllowableValue(KustoAuthenticationStrategy.class);
final ConnectionStringBuilder builder = switch (kustoAuthenticationStrategy) {
case APPLICATION_CREDENTIALS -> {

View File

@ -177,8 +177,8 @@ public class AzureEventHubRecordSink extends AbstractControllerService implement
final String eventHubName = context.getProperty(EVENT_HUB_NAME).evaluateAttributeExpressions().getValue();
final String policyName = context.getProperty(SHARED_ACCESS_POLICY).getValue();
final String policyKey = context.getProperty(SHARED_ACCESS_POLICY_KEY).getValue();
final AzureAuthenticationStrategy azureAuthenticationStrategy = context.getProperty(AUTHENTICATION_STRATEGY).asDescribedValue(AzureAuthenticationStrategy.class);
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asDescribedValue(AzureEventHubTransportType.class).asAmqpTransportType();
final AzureAuthenticationStrategy azureAuthenticationStrategy = context.getProperty(AUTHENTICATION_STRATEGY).asAllowableValue(AzureAuthenticationStrategy.class);
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asAllowableValue(AzureEventHubTransportType.class).asAmqpTransportType();
client = createEventHubClient(namespace, serviceBusEndpoint, eventHubName, policyName, policyKey, azureAuthenticationStrategy, transportType);
}

View File

@ -35,7 +35,6 @@ import java.util.function.Function;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.CREDENTIALS_TYPE;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.MANAGED_IDENTITY_CLIENT_ID;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.PROXY_CONFIGURATION_SERVICE;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.SERVICE_PRINCIPAL_CLIENT_ID;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.SERVICE_PRINCIPAL_CLIENT_SECRET;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.SERVICE_PRINCIPAL_TENANT_ID;
@ -134,7 +133,7 @@ public class ADLSCredentialsControllerService extends AbstractControllerService
setValue(credentialsBuilder, ACCOUNT_KEY, PropertyValue::getValue, ADLSCredentialsDetails.Builder::setAccountKey, attributes);
setValue(credentialsBuilder, SAS_TOKEN, PropertyValue::getValue, ADLSCredentialsDetails.Builder::setSasToken, attributes);
setValue(credentialsBuilder, ENDPOINT_SUFFIX, PropertyValue::getValue, ADLSCredentialsDetails.Builder::setEndpointSuffix, attributes);
setValue(credentialsBuilder, CREDENTIALS_TYPE, property -> property.asDescribedValue(AzureStorageCredentialsType.class) == AzureStorageCredentialsType.MANAGED_IDENTITY,
setValue(credentialsBuilder, CREDENTIALS_TYPE, property -> property.asAllowableValue(AzureStorageCredentialsType.class) == AzureStorageCredentialsType.MANAGED_IDENTITY,
ADLSCredentialsDetails.Builder::setUseManagedIdentity, attributes);
setValue(credentialsBuilder, MANAGED_IDENTITY_CLIENT_ID, PropertyValue::getValue, ADLSCredentialsDetails.Builder::setManagedIdentityClientId, attributes);
setValue(credentialsBuilder, SERVICE_PRINCIPAL_TENANT_ID, PropertyValue::getValue, ADLSCredentialsDetails.Builder::setServicePrincipalTenantId, attributes);

View File

@ -33,7 +33,6 @@ import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.A
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.ACCOUNT_NAME;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.CREDENTIALS_TYPE;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.MANAGED_IDENTITY_CLIENT_ID;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.PROXY_CONFIGURATION_SERVICE;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.SAS_TOKEN;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.SERVICE_PRINCIPAL_CLIENT_ID;
import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.SERVICE_PRINCIPAL_CLIENT_SECRET;
@ -87,7 +86,7 @@ public class AzureStorageCredentialsControllerService_v12 extends AbstractContro
public AzureStorageCredentialsDetails_v12 getCredentialsDetails(Map<String, String> attributes) {
String accountName = context.getProperty(ACCOUNT_NAME).getValue();
String endpointSuffix = context.getProperty(ENDPOINT_SUFFIX).getValue();
AzureStorageCredentialsType credentialsType = context.getProperty(CREDENTIALS_TYPE).asDescribedValue(AzureStorageCredentialsType.class);
AzureStorageCredentialsType credentialsType = context.getProperty(CREDENTIALS_TYPE).asAllowableValue(AzureStorageCredentialsType.class);
ProxyOptions proxyOptions = AzureStorageUtils.getProxyOptions(context);
switch (credentialsType) {

View File

@ -580,7 +580,7 @@ public class CaptureChangeMySQL extends AbstractSessionFactoryProcessor {
binlogResourceInfo.setInTransaction("true".equals(stateMap.get("inTransaction")));
// Build a event writer config object for the event writers to use
final FlowFileEventWriteStrategy flowFileEventWriteStrategy = context.getProperty(EVENTS_PER_FLOWFILE_STRATEGY).asDescribedValue(FlowFileEventWriteStrategy.class);
final FlowFileEventWriteStrategy flowFileEventWriteStrategy = context.getProperty(EVENTS_PER_FLOWFILE_STRATEGY).asAllowableValue(FlowFileEventWriteStrategy.class);
eventWriterConfiguration = new EventWriterConfiguration(
flowFileEventWriteStrategy,
context.getProperty(NUMBER_OF_EVENTS_PER_FLOWFILE).evaluateAttributeExpressions().asInteger()

View File

@ -161,11 +161,11 @@ public class DecryptContent extends AbstractProcessor {
return;
}
final KeySpecificationFormat keySpecificationFormat = context.getProperty(KEY_SPECIFICATION_FORMAT).asDescribedValue(KeySpecificationFormat.class);
final KeySpecificationFormat keySpecificationFormat = context.getProperty(KEY_SPECIFICATION_FORMAT).asAllowableValue(KeySpecificationFormat.class);
final String cipherTransformation = getCipherTransformation(context);
final Cipher cipher = getCipher(cipherTransformation);
final CipherAlgorithmMode cipherAlgorithmMode = context.getProperty(CIPHER_ALGORITHM_MODE).asDescribedValue(CipherAlgorithmMode.class);
final CipherAlgorithmMode cipherAlgorithmMode = context.getProperty(CIPHER_ALGORITHM_MODE).asAllowableValue(CipherAlgorithmMode.class);
final KeySpec keySpec = getKeySpec(context, keySpecificationFormat);
final StreamCallback callback = new DecryptCallback(cipher, cipherAlgorithmMode, keySpec);

View File

@ -224,7 +224,7 @@ public class DecryptContentAge extends AbstractProcessor implements VerifiablePr
}
private List<RecipientStanzaReader> getRecipientStanzaReaders(final PropertyContext context) throws IOException {
final KeySource keySource = context.getProperty(PRIVATE_KEY_SOURCE).asDescribedValue(KeySource.class);
final KeySource keySource = context.getProperty(PRIVATE_KEY_SOURCE).asAllowableValue(KeySource.class);
final List<ResourceReference> resources = switch (keySource) {
case PROPERTIES -> List.of(context.getProperty(PRIVATE_KEY_IDENTITIES).asResource());
case RESOURCES -> context.getProperty(PRIVATE_KEY_IDENTITY_RESOURCES).asResources().asList();

View File

@ -143,7 +143,7 @@ public class DecryptContentCompatibility extends AbstractProcessor {
}
final CompatibilityModeEncryptionScheme encryptionScheme =
context.getProperty(ENCRYPTION_SCHEME).asDescribedValue(CompatibilityModeEncryptionScheme.class);
context.getProperty(ENCRYPTION_SCHEME).asAllowableValue(CompatibilityModeEncryptionScheme.class);
final String scheme = encryptionScheme.getValue();
final Cipher cipher = getCipher(scheme);
@ -151,7 +151,7 @@ public class DecryptContentCompatibility extends AbstractProcessor {
final PBEKeySpec keySpec = new PBEKeySpec(password);
final CompatibilityModeKeyDerivationStrategy keyDerivationStrategy =
context.getProperty(KEY_DERIVATION_STRATEGY).asDescribedValue(CompatibilityModeKeyDerivationStrategy.class);
context.getProperty(KEY_DERIVATION_STRATEGY).asAllowableValue(CompatibilityModeKeyDerivationStrategy.class);
final StreamCallback callback = new DecryptCallback(cipher, keySpec, keyDerivationStrategy);
final Map<String, String> attributes = new LinkedHashMap<>();

View File

@ -214,7 +214,7 @@ public class EncryptContentAge extends AbstractProcessor implements VerifiablePr
}
try {
final FileEncoding fileEncoding = context.getProperty(FILE_ENCODING).asDescribedValue(FileEncoding.class);
final FileEncoding fileEncoding = context.getProperty(FILE_ENCODING).asAllowableValue(FileEncoding.class);
final EncryptingChannelFactory encryptingChannelFactory = getEncryptingChannelFactory(fileEncoding);
final StreamCallback streamCallback = new EncryptingStreamCallback(configuredRecipientStanzaWriters, encryptingChannelFactory);
flowFile = session.write(flowFile, streamCallback);
@ -234,7 +234,7 @@ public class EncryptContentAge extends AbstractProcessor implements VerifiablePr
}
private List<RecipientStanzaWriter> getRecipientStanzaWriters(final PropertyContext context) throws IOException {
final KeySource keySource = context.getProperty(PUBLIC_KEY_SOURCE).asDescribedValue(KeySource.class);
final KeySource keySource = context.getProperty(PUBLIC_KEY_SOURCE).asAllowableValue(KeySource.class);
final List<ResourceReference> resources = switch (keySource) {
case PROPERTIES -> List.of(context.getProperty(PUBLIC_KEY_RECIPIENTS).asResource());
case RESOURCES -> context.getProperty(PUBLIC_KEY_RECIPIENT_RESOURCES).asResources().asList();

View File

@ -195,7 +195,7 @@ public class ModifyCompression extends AbstractProcessor {
}
final CompressionStrategy inputCompressionStrategy;
final CompressionStrategy configuredInputCompressionStrategy = context.getProperty(INPUT_COMPRESSION_STRATEGY).asDescribedValue(CompressionStrategy.class);
final CompressionStrategy configuredInputCompressionStrategy = context.getProperty(INPUT_COMPRESSION_STRATEGY).asAllowableValue(CompressionStrategy.class);
if (CompressionStrategy.MIME_TYPE_ATTRIBUTE == configuredInputCompressionStrategy) {
final String mimeType = flowFile.getAttribute(CoreAttributes.MIME_TYPE.key());
if (mimeType == null) {
@ -214,7 +214,7 @@ public class ModifyCompression extends AbstractProcessor {
inputCompressionStrategy = configuredInputCompressionStrategy;
}
final CompressionStrategy outputCompressionStrategy = context.getProperty(OUTPUT_COMPRESSION_STRATEGY).asDescribedValue(CompressionStrategy.class);
final CompressionStrategy outputCompressionStrategy = context.getProperty(OUTPUT_COMPRESSION_STRATEGY).asAllowableValue(CompressionStrategy.class);
final AtomicReference<String> mimeTypeRef = new AtomicReference<>(null);
final StopWatch stopWatch = new StopWatch(true);
final long inputFileSize = flowFile.getSize();

View File

@ -25,7 +25,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.nifi.components.DescribedValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.resource.ResourceReference;
@ -154,7 +153,7 @@ public class ConnectableProcessContext implements ProcessContext {
}
@Override
public <E extends Enum<E> & DescribedValue> E asDescribedValue(Class<E> enumType) throws IllegalArgumentException {
public <E extends Enum<E>> E asAllowableValue(Class<E> enumType) throws IllegalArgumentException {
return null;
}

View File

@ -111,20 +111,34 @@ public class TestStandardPropertyValue {
}
@Test
public void testGetValueAsDescribedValue() {
for (ExamplePropertyEnum enumValue : ExamplePropertyEnum.values()) {
public void testGetValueAsAllowableValue() {
for (ExampleDescribedValueEnum enumValue : ExampleDescribedValueEnum.values()) {
final PropertyValue value = new StandardPropertyValue(enumValue.getValue(), lookup, ParameterLookup.EMPTY);
assertEquals(enumValue, value.asDescribedValue(ExamplePropertyEnum.class));
assertEquals(enumValue, value.asAllowableValue(ExampleDescribedValueEnum.class));
}
final PropertyValue nullDescribedValue = new StandardPropertyValue(null, lookup, ParameterLookup.EMPTY);
assertNull(nullDescribedValue.asAllowableValue(ExampleDescribedValueEnum.class));
IllegalArgumentException describedValueException = assertThrows(IllegalArgumentException.class, () -> {
final PropertyValue invalidValue = new StandardPropertyValue("FOO", lookup, ParameterLookup.EMPTY);
invalidValue.asAllowableValue(ExampleDescribedValueEnum.class);
});
assertEquals("ExampleDescribedValueEnum does not have an entry with value FOO", describedValueException.getMessage());
for (ExampleNonDescribedValueEnum enumValue : ExampleNonDescribedValueEnum.values()) {
final PropertyValue value = new StandardPropertyValue(enumValue.name(), lookup, ParameterLookup.EMPTY);
assertEquals(enumValue, value.asAllowableValue(ExampleNonDescribedValueEnum.class));
}
final PropertyValue nullValue = new StandardPropertyValue(null, lookup, ParameterLookup.EMPTY);
assertNull(nullValue.asDescribedValue(ExamplePropertyEnum.class));
assertNull(nullValue.asAllowableValue(ExampleNonDescribedValueEnum.class));
IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> {
final PropertyValue invalidValue = new StandardPropertyValue("FOO", lookup, ParameterLookup.EMPTY);
invalidValue.asDescribedValue(ExamplePropertyEnum.class);
invalidValue.asAllowableValue(ExampleNonDescribedValueEnum.class);
});
assertEquals("ExamplePropertyEnum does not have an entry with value FOO", exception.getMessage());
assertEquals("ExampleNonDescribedValueEnum does not have an entry with value FOO", exception.getMessage());
}
@Test
@ -197,7 +211,7 @@ public class TestStandardPropertyValue {
}
private enum ExamplePropertyEnum implements DescribedValue {
private enum ExampleDescribedValueEnum implements DescribedValue {
ONE("One Value", "One Display", "One Description"),
OTHER("Other Value", "Other Display", "Other Description"),
ANOTHER("Another Value", "Another Display", "Another Description");
@ -206,7 +220,7 @@ public class TestStandardPropertyValue {
private final String displayName;
private final String description;
ExamplePropertyEnum(final String value, final String displayName, final String description) {
ExampleDescribedValueEnum(final String value, final String displayName, final String description) {
this.value = value;
this.displayName = displayName;
this.description = description;
@ -227,4 +241,6 @@ public class TestStandardPropertyValue {
return this.description;
}
}
private enum ExampleNonDescribedValueEnum { ONE, TWO, THREE, FOUR }
}
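For completeness, a hedged sketch of how a component might declare such a property and resolve it in one call, reusing the test's ExampleDescribedValueEnum; the descriptor itself is illustrative and assumes the enum-based PropertyDescriptor.Builder#allowableValues(Class) overload referenced in the updated javadoc.

import org.apache.nifi.components.PropertyDescriptor;

// Hypothetical property whose allowable values are the entries of the enum.
static final PropertyDescriptor EXAMPLE_PROPERTY = new PropertyDescriptor.Builder()
        .name("Example Property")
        .allowableValues(ExampleDescribedValueEnum.class)
        .defaultValue(ExampleDescribedValueEnum.ONE.getValue())
        .required(true)
        .build();

// At runtime the configured raw value resolves directly to the matching enum entry.
final ExampleDescribedValueEnum selected =
        context.getProperty(EXAMPLE_PROPERTY).asAllowableValue(ExampleDescribedValueEnum.class);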

View File

@ -286,7 +286,7 @@ public class PutIceberg extends AbstractIcebergProcessor {
final FileFormat format = getFileFormat(table.properties(), fileFormat);
final IcebergTaskWriterFactory taskWriterFactory = new IcebergTaskWriterFactory(table, flowFile.getId(), format, maximumFileSize);
taskWriter = taskWriterFactory.create();
final UnmatchedColumnBehavior unmatchedColumnBehavior = context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).asDescribedValue(UnmatchedColumnBehavior.class);
final UnmatchedColumnBehavior unmatchedColumnBehavior = context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).asAllowableValue(UnmatchedColumnBehavior.class);
final IcebergRecordConverter recordConverter = new IcebergRecordConverter(table.schema(), reader.getSchema(), format, unmatchedColumnBehavior, getLogger());

View File

@ -432,8 +432,8 @@ public class ConsumeKafkaRecord_2_6 extends AbstractProcessor implements KafkaCl
final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
final Charset charset = Charset.forName(charsetName);
final OutputStrategy outputStrategy = context.getProperty(OUTPUT_STRATEGY).asDescribedValue(OutputStrategy.class);
final KeyFormat keyFormat = context.getProperty(KEY_FORMAT).asDescribedValue(KeyFormat.class);
final OutputStrategy outputStrategy = context.getProperty(OUTPUT_STRATEGY).asAllowableValue(OutputStrategy.class);
final KeyFormat keyFormat = context.getProperty(KEY_FORMAT).asAllowableValue(KeyFormat.class);
final RecordReaderFactory keyReaderFactory = context.getProperty(KEY_RECORD_READER).asControllerService(RecordReaderFactory.class);
final String headerNameRegex = context.getProperty(HEADER_NAME_REGEX).getValue();
@ -441,7 +441,7 @@ public class ConsumeKafkaRecord_2_6 extends AbstractProcessor implements KafkaCl
final Pattern headerNamePattern = isActiveHeaderNamePattern ? Pattern.compile(headerNameRegex) : null;
final boolean separateByKey = context.getProperty(SEPARATE_BY_KEY).asBoolean();
final KeyEncoding keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).asDescribedValue(KeyEncoding.class);
final KeyEncoding keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).asAllowableValue(KeyEncoding.class);
final int[] partitionsToConsume;
try {

View File

@ -374,7 +374,7 @@ public class ConsumeKafka_2_6 extends AbstractProcessor implements KafkaClientCo
final String topicListing = context.getProperty(ConsumeKafka_2_6.TOPICS).evaluateAttributeExpressions().getValue();
final String topicType = context.getProperty(ConsumeKafka_2_6.TOPIC_TYPE).evaluateAttributeExpressions().getValue();
final List<String> topics = new ArrayList<>();
final KeyEncoding keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).asDescribedValue(KeyEncoding.class);
final KeyEncoding keyEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).asAllowableValue(KeyEncoding.class);
final String securityProtocol = context.getProperty(SECURITY_PROTOCOL).getValue();
final String bootstrapServers = context.getProperty(BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
final boolean honorTransactions = context.getProperty(HONOR_TRANSACTIONS).asBoolean();

View File

@ -439,7 +439,7 @@ public class PublishKafkaRecord_2_6 extends AbstractProcessor implements KafkaPu
final boolean useTransactions = context.getProperty(USE_TRANSACTIONS).asBoolean();
final String transactionalIdPrefix = context.getProperty(TRANSACTIONAL_ID_PREFIX).evaluateAttributeExpressions().getValue();
Supplier<String> transactionalIdSupplier = new TransactionIdSupplier(transactionalIdPrefix);
final PublishStrategy publishStrategy = context.getProperty(PUBLISH_STRATEGY).asDescribedValue(PublishStrategy.class);
final PublishStrategy publishStrategy = context.getProperty(PUBLISH_STRATEGY).asAllowableValue(PublishStrategy.class);
final String charsetName = context.getProperty(MESSAGE_HEADER_ENCODING).evaluateAttributeExpressions().getValue();
final Charset charset = Charset.forName(charsetName);
@ -616,7 +616,7 @@ public class PublishKafkaRecord_2_6 extends AbstractProcessor implements KafkaPu
}
private PublishFailureStrategy getFailureStrategy(final ProcessContext context) {
return switch (context.getProperty(FAILURE_STRATEGY).asDescribedValue(FailureStrategy.class)) {
return switch (context.getProperty(FAILURE_STRATEGY).asAllowableValue(FailureStrategy.class)) {
case ROUTE_TO_FAILURE -> (session, flowFiles) -> session.transfer(flowFiles, REL_FAILURE);
case ROLLBACK -> (session, flowFiles) -> session.rollback();
};

View File

@ -514,7 +514,7 @@ public class PublishKafka_2_6 extends AbstractProcessor implements KafkaPublishC
}
private PublishFailureStrategy getFailureStrategy(final ProcessContext context) {
return switch (context.getProperty(FAILURE_STRATEGY).asDescribedValue(FailureStrategy.class)) {
return switch (context.getProperty(FAILURE_STRATEGY).asAllowableValue(FailureStrategy.class)) {
case ROUTE_TO_FAILURE -> (session, flowFiles) -> session.transfer(flowFiles, REL_FAILURE);
case ROLLBACK -> (session, flowFiles) -> session.rollback();
};

View File

@ -500,7 +500,7 @@ public class TestConsumeKafkaMock {
final Pattern patternTopic = (topic == null) ? null : Pattern.compile(topic);
final String groupId = context.getProperty(ConsumerConfig.GROUP_ID_CONFIG).getValue();
final OutputStrategy outputStrategy = OutputStrategy.valueOf(context.getProperty("output-strategy").getValue());
final KeyFormat keyFormat = context.getProperty("key-format").asDescribedValue(KeyFormat.class);
final KeyFormat keyFormat = context.getProperty("key-format").asAllowableValue(KeyFormat.class);
final RecordReaderFactory keyReaderFactory = context.getProperty("key-record-reader")
.asControllerService(RecordReaderFactory.class);
return new ConsumerPool(

View File

@ -44,7 +44,7 @@ public class DelegatingLoginConfigProvider implements LoginConfigProvider {
*/
@Override
public String getConfiguration(final PropertyContext context) {
final SaslMechanism saslMechanism = context.getProperty(KafkaClientComponent.SASL_MECHANISM).asDescribedValue(SaslMechanism.class);
final SaslMechanism saslMechanism = context.getProperty(KafkaClientComponent.SASL_MECHANISM).asAllowableValue(SaslMechanism.class);
final LoginConfigProvider loginConfigProvider = PROVIDERS.getOrDefault(saslMechanism, SCRAM_PROVIDER);
return loginConfigProvider.getConfiguration(context);
}

View File

@ -86,7 +86,7 @@ public class StandardKafkaPropertyProvider implements KafkaPropertyProvider {
final String loginConfig = LOGIN_CONFIG_PROVIDER.getConfiguration(context);
properties.put(SASL_JAAS_CONFIG.getProperty(), loginConfig);
final SaslMechanism saslMechanism = context.getProperty(SASL_MECHANISM).asDescribedValue(SaslMechanism.class);
final SaslMechanism saslMechanism = context.getProperty(SASL_MECHANISM).asAllowableValue(SaslMechanism.class);
if (saslMechanism == SaslMechanism.GSSAPI && isCustomKerberosLoginFound()) {
properties.put(SASL_LOGIN_CLASS.getProperty(), SASL_GSSAPI_CUSTOM_LOGIN_CLASS);
} else if (saslMechanism == SaslMechanism.AWS_MSK_IAM && isAwsMskIamCallbackHandlerFound()) {

View File

@ -94,7 +94,7 @@ public class KafkaClientCustomValidationFunction implements Function<ValidationC
}
private void validateKerberosCredentials(final ValidationContext validationContext, final Collection<ValidationResult> results) {
final SaslMechanism saslMechanism = validationContext.getProperty(SASL_MECHANISM).asDescribedValue(SaslMechanism.class);
final SaslMechanism saslMechanism = validationContext.getProperty(SASL_MECHANISM).asAllowableValue(SaslMechanism.class);
final String securityProtocol = validationContext.getProperty(SECURITY_PROTOCOL).getValue();
if (saslMechanism == SaslMechanism.GSSAPI && SASL_PROTOCOLS.contains(securityProtocol)) {
@ -123,7 +123,7 @@ public class KafkaClientCustomValidationFunction implements Function<ValidationC
}
private void validateUsernamePassword(final ValidationContext validationContext, final Collection<ValidationResult> results) {
final SaslMechanism saslMechanism = validationContext.getProperty(SASL_MECHANISM).asDescribedValue(SaslMechanism.class);
final SaslMechanism saslMechanism = validationContext.getProperty(SASL_MECHANISM).asAllowableValue(SaslMechanism.class);
if (USERNAME_PASSWORD_SASL_MECHANISMS.contains(saslMechanism)) {
final String username = validationContext.getProperty(SASL_USERNAME).evaluateAttributeExpressions().getValue();
@ -151,7 +151,7 @@ public class KafkaClientCustomValidationFunction implements Function<ValidationC
private void validateAwsMskIamMechanism(final ValidationContext validationContext, final Collection<ValidationResult> results) {
final PropertyValue saslMechanismProperty = validationContext.getProperty(SASL_MECHANISM);
if (saslMechanismProperty.isSet()) {
final SaslMechanism saslMechanism = saslMechanismProperty.asDescribedValue(SaslMechanism.class);
final SaslMechanism saslMechanism = saslMechanismProperty.asAllowableValue(SaslMechanism.class);
if (saslMechanism == SaslMechanism.AWS_MSK_IAM && !StandardKafkaPropertyProvider.isAwsMskIamCallbackHandlerFound()) {
final String explanation = String.format("[%s] required class not found: Kafka modules must be compiled with AWS MSK enabled",

View File

@ -141,7 +141,7 @@ public class PutSnowflakeInternalStage extends AbstractProcessor {
return;
}
final SnowflakeInternalStageType internalStageType = context.getProperty(INTERNAL_STAGE_TYPE).asDescribedValue(SnowflakeInternalStageType.class);
final SnowflakeInternalStageType internalStageType = context.getProperty(INTERNAL_STAGE_TYPE).asAllowableValue(SnowflakeInternalStageType.class);
final SnowflakeInternalStageTypeParameters parameters = getSnowflakeInternalStageTypeParameters(context, flowFile);
final String internalStageName = internalStageType.getStage(parameters);
final SnowflakeConnectionProviderService connectionProviderService =

View File

@ -220,7 +220,7 @@ public class SnowflakeComputingConnectionPool extends AbstractDBCPConnectionPool
}
protected String getUrl(final ConfigurationContext context) {
final ConnectionUrlFormat connectionUrlFormat = context.getProperty(CONNECTION_URL_FORMAT).asDescribedValue(ConnectionUrlFormat.class);
final ConnectionUrlFormat connectionUrlFormat = context.getProperty(CONNECTION_URL_FORMAT).asAllowableValue(ConnectionUrlFormat.class);
final ConnectionUrlFormatParameters parameters = getConnectionUrlFormatParameters(context);
return connectionUrlFormat.buildConnectionUrl(parameters);

View File

@ -161,7 +161,7 @@ public class StandardSnowflakeIngestManagerProviderService extends AbstractContr
final PrivateKey privateKey = privateKeyService.getPrivateKey();
final AccountIdentifierFormat accountIdentifierFormat = context.getProperty(ACCOUNT_IDENTIFIER_FORMAT)
.asDescribedValue(AccountIdentifierFormat.class);
.asAllowableValue(AccountIdentifierFormat.class);
final AccountIdentifierFormatParameters parameters = getAccountIdentifierFormatParameters(context);
final String account = accountIdentifierFormat.getAccount(parameters);
final String host = accountIdentifierFormat.getHostname(parameters);

View File

@ -204,13 +204,13 @@ public class FilterAttribute extends AbstractProcessor {
private static FilterMode getFilterMode(ProcessContext context) {
return context
.getProperty(FILTER_MODE)
.asDescribedValue(FilterMode.class);
.asAllowableValue(FilterMode.class);
}
private static MatchingStrategy getMatchingStrategy(ProcessContext context) {
return context
.getProperty(MATCHING_STRATEGY)
.asDescribedValue(MatchingStrategy.class);
.asAllowableValue(MatchingStrategy.class);
}
private static String getAttributeSet(ProcessContext context, FlowFile flowFile) {

View File

@ -413,7 +413,7 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor {
// get the configured port
final int port = context.getProperty(PORT).evaluateAttributeExpressions().asInteger();
final HttpProtocolStrategy httpProtocolStrategy = context.getProperty(HTTP_PROTOCOL_STRATEGY).asDescribedValue(HttpProtocolStrategy.class);
final HttpProtocolStrategy httpProtocolStrategy = context.getProperty(HTTP_PROTOCOL_STRATEGY).asAllowableValue(HttpProtocolStrategy.class);
final ServerConnector connector = createServerConnector(server,
port,
sslContextService,

View File

@ -47,7 +47,7 @@ public abstract class AbstractZendesk extends AbstractProcessor {
public void onScheduled(ProcessContext context) {
final WebClientServiceProvider webClientServiceProvider = context.getProperty(WEB_CLIENT_SERVICE_PROVIDER).asControllerService(WebClientServiceProvider.class);
final String user = context.getProperty(ZENDESK_USER).evaluateAttributeExpressions().getValue();
final ZendeskAuthenticationType authenticationType = context.getProperty(ZENDESK_AUTHENTICATION_TYPE).asDescribedValue(ZendeskAuthenticationType.class);
final ZendeskAuthenticationType authenticationType = context.getProperty(ZENDESK_AUTHENTICATION_TYPE).asAllowableValue(ZendeskAuthenticationType.class);
final String authenticationCredentials = context.getProperty(ZENDESK_AUTHENTICATION_CREDENTIAL).evaluateAttributeExpressions().getValue();
final String subdomain = context.getProperty(ZENDESK_SUBDOMAIN).evaluateAttributeExpressions().getValue();
final ZendeskAuthenticationContext authenticationContext = new ZendeskAuthenticationContext(subdomain, user, authenticationType, authenticationCredentials);

View File

@ -184,7 +184,7 @@ public class ZendeskRecordSink extends AbstractControllerService implements Reco
final String subdomain = context.getProperty(ZENDESK_SUBDOMAIN).evaluateAttributeExpressions().getValue();
final String user = context.getProperty(ZENDESK_USER).evaluateAttributeExpressions().getValue();
final ZendeskAuthenticationType authenticationType = context.getProperty(ZENDESK_AUTHENTICATION_TYPE).asDescribedValue(ZendeskAuthenticationType.class);
final ZendeskAuthenticationType authenticationType = context.getProperty(ZENDESK_AUTHENTICATION_TYPE).asAllowableValue(ZendeskAuthenticationType.class);
final String authenticationCredentials = context.getProperty(ZENDESK_AUTHENTICATION_CREDENTIAL).evaluateAttributeExpressions().getValue();
final ZendeskAuthenticationContext authenticationContext = new ZendeskAuthenticationContext(subdomain, user, authenticationType, authenticationCredentials);
final WebClientServiceProvider webClientServiceProvider = context.getProperty(WEB_CLIENT_SERVICE_PROVIDER).asControllerService(WebClientServiceProvider.class);