mirror of https://github.com/apache/nifi.git
NIFI-12572 Updated nifi-azure-bundle using current API methods

This closes #8210

Signed-off-by: David Handermann <exceptionfactory@apache.org>
parent 6f51c58046
commit dd5854d324
@@ -28,19 +28,6 @@ import com.microsoft.graph.requests.extensions.IGroupCollectionRequestBuilder;
 import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesPage;
 import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesRequest;
 import com.microsoft.graph.requests.extensions.IUserCollectionWithReferencesRequestBuilder;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Objects;
-import java.util.Set;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.stream.Collectors;
 import org.apache.nifi.authorization.AuthorizerConfigurationContext;
 import org.apache.nifi.authorization.Group;
 import org.apache.nifi.authorization.User;
@@ -58,6 +45,19 @@ import org.jetbrains.annotations.NotNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
 /**
  * The AzureGraphUserGroupProvider provides support for retrieving users and
  * groups from Azure Active Directory (AAD) using graph rest-api & SDK.
@@ -275,7 +275,7 @@ public class AzureGraphUserGroupProvider implements UserGroupProvider {
             Arrays.stream(groupFilterList.split(","))
                 .map(String::trim)
                 .filter(s-> !s.isEmpty())
-                .collect(Collectors.toList())
+                .toList()
             );
         }
         return Collections.unmodifiableSet(groupDisplayNames);
@@ -309,14 +309,10 @@ public class AzureGraphUserGroupProvider implements UserGroupProvider {
         List<com.microsoft.graph.models.extensions.Group> currentPage = filterResults.getCurrentPage();
         while (currentPage != null) {
             for (com.microsoft.graph.models.extensions.Group grp : currentPage) {
-                boolean filterEvaluation = true;
-                if (!StringUtils.isEmpty(suffix) && !grp.displayName.endsWith(suffix)) {
-                    filterEvaluation = false;
-                }
-                if (!StringUtils.isEmpty(substring) && !grp.displayName.contains(substring)) {
-                    filterEvaluation = false;
-                }
-                if (filterEvaluation) {
+                boolean suffixMatches = StringUtils.isEmpty(suffix) || grp.displayName.endsWith(suffix);
+                boolean substringMatches = StringUtils.isEmpty(substring) || grp.displayName.contains(substring);
+
+                if (suffixMatches && substringMatches) {
                     groups.add(grp.displayName);
                 }
             }
@@ -345,7 +341,7 @@ public class AzureGraphUserGroupProvider implements UserGroupProvider {
         final List<com.microsoft.graph.models.extensions.Group> currentPage = results.getCurrentPage();
 
         if (currentPage != null && !currentPage.isEmpty()) {
-            final com.microsoft.graph.models.extensions.Group graphGroup = results.getCurrentPage().get(0);
+            final com.microsoft.graph.models.extensions.Group graphGroup = results.getCurrentPage().getFirst();
             final Group.Builder groupBuilder =
                 new Group.Builder()
                     .identifier(graphGroup.id)
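Note on the toList() change above: Stream.toList() (Java 16+) is not a drop-in replacement for collect(Collectors.toList()) in every case, because it returns an unmodifiable list while the collector has historically produced a mutable ArrayList. Here the result is only read and then wrapped in Collections.unmodifiableSet, so the stricter semantics are safe. A minimal sketch of the difference (names are illustrative):

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    class ToListDemo {
        public static void main(String[] args) {
            // Collectors.toList() gives no immutability guarantee; in practice it is a mutable ArrayList
            List<String> mutable = Stream.of("a", "b").collect(Collectors.toList());
            mutable.add("c"); // allowed

            // Stream.toList() always returns an unmodifiable list
            List<String> fixed = Stream.of("a", "b").toList();
            // fixed.add("c"); // would throw UnsupportedOperationException
        }
    }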
@@ -17,7 +17,9 @@
 
 package org.apache.nifi.authorization.azure;
 
+import static java.util.stream.Collectors.toMap;
+import org.apache.nifi.authorization.Group;
+import org.apache.nifi.authorization.User;
+import org.apache.nifi.authorization.UserAndGroups;
+
 import java.util.Collections;
 import java.util.HashMap;
@@ -25,9 +27,7 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.nifi.authorization.Group;
-import org.apache.nifi.authorization.User;
-import org.apache.nifi.authorization.UserAndGroups;
-import static java.util.stream.Collectors.toMap;
 
 public class ImmutableAzureGraphUserGroup {
     private final Set<User> users;
@@ -119,9 +119,7 @@ public class ImmutableAzureGraphUserGroup {
         final Map<String, Group> groupsByObjectId = new HashMap<>();
         final Map<String, Group> groupsByDisplayName = new HashMap<>();
         final Map<String, Set<Group>> groupsByUserObjectId =
-            users.stream().collect(toMap(User::getIdentifier, user -> {
-                return new HashSet<Group>();
-            }));
+            users.stream().collect(toMap(User::getIdentifier, user -> new HashSet<>()));
 
         groups.forEach(group -> {
             groupsByObjectId.put(group.getIdentifier(), group);
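The simplified toMap collector above seeds one empty set per user identifier; the block lambda with an explicit return collapses to an expression lambda. A self-contained sketch of the idiom (the ids are hypothetical):

    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    import static java.util.stream.Collectors.toMap;

    class ToMapDemo {
        public static void main(String[] args) {
            List<String> userIds = List.of("u1", "u2");
            // one empty, mutable set per key, filled in a later pass
            Map<String, Set<String>> groupsByUser =
                    userIds.stream().collect(toMap(id -> id, id -> new HashSet<>()));
            groupsByUser.get("u1").add("g1");
            System.out.println(groupsByUser); // e.g. {u1=[g1], u2=[]}
        }
    }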
@@ -31,16 +31,15 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.FileInputStream;
-import java.io.IOException;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Properties;
 import java.util.Set;
-import java.util.stream.Collectors;
 
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class AzureGraphUserGroupProviderIT {
     private static final Logger logger = LoggerFactory.getLogger(AzureGraphUserGroupProviderIT.class);
@@ -92,7 +91,7 @@ public class AzureGraphUserGroupProviderIT {
     private UserGroupProviderInitializationContext initContext;
 
     @BeforeEach
-    public void setup() throws IOException {
+    public void setup() {
         authContext = Mockito.mock(AuthorizerConfigurationContext.class);
         initContext = Mockito.mock(UserGroupProviderInitializationContext.class);
 
@@ -115,7 +114,6 @@ public class AzureGraphUserGroupProviderIT {
             testingProvider.onConfigured(authContext);
         } catch (final Exception exc) {
             logger.error("Error during setup; tests cannot run on this system.");
-            return;
         }
     }
 
@@ -132,11 +130,11 @@ public class AzureGraphUserGroupProviderIT {
 
         setupTestingProvider();
 
-        assertTrue(testingProvider.getGroups().size() > 0);
-        assertTrue(testingProvider.getUsers().size() > 0);
+        assertFalse(testingProvider.getGroups().isEmpty());
+        assertFalse(testingProvider.getUsers().isEmpty());
         UserAndGroups uag = testingProvider.getUserAndGroups(getKnownTestUserName());
         assertNotNull(uag.getUser());
-        assertTrue(uag.getGroups().size() > 0);
+        assertFalse(uag.getGroups().isEmpty());
 
     }
 
@@ -149,15 +147,15 @@ public class AzureGraphUserGroupProviderIT {
 
         setupTestingProvider();
 
-        assertTrue(testingProvider.getGroups().size() > 0);
-        assertTrue(testingProvider.getUsers().size() > 0);
+        assertFalse(testingProvider.getGroups().isEmpty());
+        assertFalse(testingProvider.getUsers().isEmpty());
         UserAndGroups uag = testingProvider.getUserAndGroups(getKnownTestUserName());
         assertNotNull(uag.getUser());
-        assertTrue(uag.getGroups().size() > 0);
+        assertFalse(uag.getGroups().isEmpty());
 
         String knownGroupName = getKnownTestGroupName();
-        List<Group> search = testingProvider.getGroups().stream().filter(g-> g.getName().equals(knownGroupName)).collect(Collectors.toList());
-        assertTrue(search.size() > 0);
+        List<Group> search = testingProvider.getGroups().stream().filter(g-> g.getName().equals(knownGroupName)).toList();
+        assertFalse(search.isEmpty());
     }
 
     @Test
@@ -169,9 +167,9 @@ public class AzureGraphUserGroupProviderIT {
             .thenReturn(new MockPropertyValue(prefix));
 
         setupTestingProvider();
-        assertTrue(testingProvider.getGroups().size() > 0);
-        List<Group> search = testingProvider.getGroups().stream().filter(g-> g.getName().equals(knownGroupName)).collect(Collectors.toList());
-        assertTrue(search.size() > 0);
+        assertFalse(testingProvider.getGroups().isEmpty());
+        List<Group> search = testingProvider.getGroups().stream().filter(g-> g.getName().equals(knownGroupName)).toList();
+        assertFalse(search.isEmpty());
     }
 
     @Test
@@ -183,9 +181,9 @@ public class AzureGraphUserGroupProviderIT {
             .thenReturn(new MockPropertyValue(suffix));
 
         setupTestingProvider();
-        assertTrue(testingProvider.getGroups().size() > 0);
-        List<Group> search = testingProvider.getGroups().stream().filter(g-> g.getName().equals(knownGroupName)).collect(Collectors.toList());
-        assertTrue(search.size() > 0);
+        assertFalse(testingProvider.getGroups().isEmpty());
+        List<Group> search = testingProvider.getGroups().stream().filter(g-> g.getName().equals(knownGroupName)).toList();
+        assertFalse(search.isEmpty());
     }
 
     @Test
@@ -197,9 +195,9 @@ public class AzureGraphUserGroupProviderIT {
             .thenReturn(new MockPropertyValue(substring));
 
         setupTestingProvider();
-        assertTrue(testingProvider.getGroups().size() > 0);
-        List<Group> search = testingProvider.getGroups().stream().filter( g-> g.getName().equals(knownGroupName)).collect(Collectors.toList());
-        assertTrue(search.size() > 0);
+        assertFalse(testingProvider.getGroups().isEmpty());
+        List<Group> search = testingProvider.getGroups().stream().filter( g-> g.getName().equals(knownGroupName)).toList();
+        assertFalse(search.isEmpty());
     }
 
     @Test
@@ -213,8 +211,8 @@ public class AzureGraphUserGroupProviderIT {
             .thenReturn(new MockPropertyValue(getGroupListInclusion()));
 
         setupTestingProvider();
-        assertTrue(testingProvider.getGroups().size() > 0);
-        Set<Group> search = testingProvider.getGroups().stream().collect(Collectors.toSet());
+        assertFalse(testingProvider.getGroups().isEmpty());
+        Set<Group> search = new HashSet<>(testingProvider.getGroups());
         // check there is no duplicate group
         assertEquals(search.size(), testingProvider.getGroups().size());
     }
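In the last hunk above, stream().collect(Collectors.toSet()) becomes a plain copy constructor: when the goal is simply to copy a collection into a Set, the constructor states that directly and skips the stream machinery. A short sketch, including the immutable variant:

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    class CopyDemo {
        public static void main(String[] args) {
            List<String> groups = List.of("a", "b", "a");
            Set<String> copy = new HashSet<>(groups); // mutable copy, deduplicated to two elements
            Set<String> frozen = Set.copyOf(copy);    // Java 10+, unmodifiable copy
            System.out.println(copy.size() + " " + frozen.size()); // 2 2
        }
    }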
@@ -34,9 +34,7 @@ import org.apache.nifi.parameter.VerifiableParameterProvider;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.services.azure.AzureCredentialsService;
 
-import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -78,15 +76,11 @@ public class AzureKeyVaultSecretsParameterProvider extends AbstractParameterProv
 
     static final String GROUP_NAME_TAG = "group-name";
 
-    private static final List<PropertyDescriptor> PROPERTIES;
-
-    static {
-        final List<PropertyDescriptor> props = new ArrayList<>();
-        props.add(AZURE_CREDENTIALS_SERVICE);
-        props.add(KEY_VAULT_URI);
-        props.add(GROUP_NAME_PATTERN);
-        PROPERTIES = Collections.unmodifiableList(props);
-    }
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            AZURE_CREDENTIALS_SERVICE,
+            KEY_VAULT_URI,
+            GROUP_NAME_PATTERN
+    );
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
@@ -94,11 +88,10 @@ public class AzureKeyVaultSecretsParameterProvider extends AbstractParameterProv
     }
 
     @Override
-    public List<ParameterGroup> fetchParameters(final ConfigurationContext context) throws IOException {
+    public List<ParameterGroup> fetchParameters(final ConfigurationContext context) {
         final SecretClient secretClient = configureSecretClient(context);
         final List<KeyVaultSecret> secrets = getAllSecrets(secretClient);
-        final List<ParameterGroup> groups = getParameterGroupsFromSecrets(context, secrets);
-        return groups;
+        return getParameterGroupsFromSecrets(context, secrets);
     }
 
     @Override
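The PROPERTIES rewrite above is this commit's most repeated pattern: a static initializer that fills an ArrayList and wraps it with Collections.unmodifiableList collapses into a single List.of expression. List.of is unmodifiable by construction and rejects null elements, which suits a fixed descriptor list. A minimal sketch:

    import java.util.List;

    class ListOfDemo {
        public static void main(String[] args) {
            List<String> descriptors = List.of("credentials-service", "vault-uri", "group-pattern");
            System.out.println(descriptors);
            // descriptors.add("x");  // UnsupportedOperationException
            // List.of("a", null);    // NullPointerException: List.of rejects nulls
        }
    }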
@@ -18,6 +18,7 @@ package org.apache.nifi.parameter.azure;
 
 import com.azure.core.http.rest.PagedIterable;
 import com.azure.security.keyvault.secrets.SecretClient;
+import com.azure.security.keyvault.secrets.implementation.SecretPropertiesHelper;
 import com.azure.security.keyvault.secrets.models.KeyVaultSecret;
 import com.azure.security.keyvault.secrets.models.SecretProperties;
 import org.apache.nifi.components.ConfigVerificationResult;
@@ -37,13 +38,12 @@ import org.mockito.Mock;
 import org.mockito.Spy;
 import org.mockito.junit.jupiter.MockitoExtension;
 
-import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Stream;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -61,7 +61,7 @@ public class TestAzureKeyVaultSecretsParameterProvider {
     @Spy
     private AzureKeyVaultSecretsParameterProvider parameterProvider;
 
-    private final List<Parameter> mySecretParameters = Arrays.asList(
+    private final List<Parameter> mySecretParameters = List.of(
             parameter("paramA", "valueA"),
             parameter("paramB", "valueB"),
             parameter("otherC", "valueOther"),
@@ -69,11 +69,11 @@ public class TestAzureKeyVaultSecretsParameterProvider {
             parameter("nonSensitiveE", "valueE"),
             parameter("otherF", "valueF")
     );
-    private final List<Parameter> otherSecretParameters = Arrays.asList(
+    private final List<Parameter> otherSecretParameters = List.of(
             parameter("paramG", "valueG"),
             parameter("otherH", "valueOther")
     );
-    private final List<ParameterGroup> mockParameterGroups = Arrays.asList(
+    private final List<ParameterGroup> mockParameterGroups = List.of(
             new ParameterGroup("MySecret", mySecretParameters),
             new ParameterGroup("OtherSecret", otherSecretParameters)
     );
@@ -84,79 +84,61 @@ public class TestAzureKeyVaultSecretsParameterProvider {
     }
 
     @Test
-    public void testFetchParametersWithNoSecrets() throws IOException, InitializationException {
-        final List<ParameterGroup> parameterGroups = Collections.singletonList(new ParameterGroup("MySecret", Collections.emptyList()));
+    public void testFetchParametersWithNoSecrets() throws InitializationException {
+        final List<ParameterGroup> parameterGroups = List.of(new ParameterGroup("MySecret", Collections.emptyList()));
         mockSecretClient(parameterGroups);
         runProviderTest(0, ConfigVerificationResult.Outcome.SUCCESSFUL);
     }
 
     @Test
-    public void testFetchParameters() throws IOException, InitializationException {
+    public void testFetchParameters() throws InitializationException {
         mockSecretClient(mockParameterGroups);
-        runProviderTest( 8, ConfigVerificationResult.Outcome.SUCCESSFUL);
+        runProviderTest(8, ConfigVerificationResult.Outcome.SUCCESSFUL);
     }
 
     @Test
-    public void testFetchDisabledParameters() throws IOException, InitializationException {
-        final List<SecretProperties> secretPropertiesList = new ArrayList<>();
-        for (final ParameterGroup group : mockParameterGroups) {
-            for (final Parameter parameter : group.getParameters()) {
-                final SecretProperties secretProperties = mock(SecretProperties.class);
-
-                when(secretProperties.isEnabled()).thenReturn(false);
-
-                secretPropertiesList.add(secretProperties);
-            }
-
-        }
-
-        final PagedIterable<SecretProperties> mockIterable = mock(PagedIterable.class);
-        when(secretClient.listPropertiesOfSecrets()).thenReturn(mockIterable);
-        when(mockIterable.iterator()).thenReturn(secretPropertiesList.iterator());
-        runProviderTest( 0, ConfigVerificationResult.Outcome.SUCCESSFUL);
+    public void testFetchDisabledParameters() throws InitializationException {
+        final List<SecretProperties> secretPropertiesList = Stream
+                .generate(() -> new SecretProperties().setEnabled(false))
+                .limit(mockParameterGroups.stream().mapToInt(group -> group.getParameters().size()).sum())
+                .toList();
+
+        mockListPropertiesOfSecrets(secretPropertiesList);
+        runProviderTest(0, ConfigVerificationResult.Outcome.SUCCESSFUL);
     }
 
     @Test
-    public void testFetchParametersWithNullTagsShouldNotThrowError() throws IOException, InitializationException {
+    public void testFetchParametersWithNullTagsShouldNotThrowError() throws InitializationException {
         final List<SecretProperties> secretPropertiesList = new ArrayList<>();
         for (final ParameterGroup group : mockParameterGroups) {
             for (final Parameter parameter : group.getParameters()) {
                 final String parameterName = parameter.getDescriptor().getName();
                 final String parameterValue = parameter.getValue();
-                final KeyVaultSecret secret = mock(KeyVaultSecret.class);
-                when(secret.getName()).thenReturn(parameterName);
-                when(secret.getValue()).thenReturn(parameterValue);
 
-                final SecretProperties secretProperties = mock(SecretProperties.class);
-                when(secret.getProperties()).thenReturn(secretProperties);
+                final SecretProperties secretProperties = new SecretProperties();
+                SecretPropertiesHelper.setName(secretProperties, parameterName);
+                secretProperties.setEnabled(true);
 
-                final Map<String, String> tags = null;
-                when(secretProperties.getTags()).thenReturn(tags);
+                final KeyVaultSecret secret = new KeyVaultSecret(parameterName, parameterValue);
+                secret.setProperties(secretProperties);
 
-                when(secretProperties.getName()).thenReturn(parameterName);
-                when(secretProperties.getVersion()).thenReturn(null);
-                when(secretProperties.isEnabled()).thenReturn(true);
                 when(secretClient.getSecret(eq(parameterName), any())).thenReturn(secret);
 
                 secretPropertiesList.add(secretProperties);
             }
 
         }
 
-        final PagedIterable<SecretProperties> mockIterable = mock(PagedIterable.class);
-        when(secretClient.listPropertiesOfSecrets()).thenReturn(mockIterable);
-        when(mockIterable.iterator()).thenReturn(secretPropertiesList.iterator());
-        runProviderTest( 0, ConfigVerificationResult.Outcome.SUCCESSFUL);
+        mockListPropertiesOfSecrets(secretPropertiesList);
+        runProviderTest(0, ConfigVerificationResult.Outcome.SUCCESSFUL);
     }
 
     @Test
-    public void testFetchParametersListFailure() throws IOException, InitializationException {
+    public void testFetchParametersListFailure() throws InitializationException {
         when(secretClient.listPropertiesOfSecrets()).thenThrow(new RuntimeException("Fake RuntimeException"));
         runProviderTest(0, ConfigVerificationResult.Outcome.FAILED);
     }
 
     @Test
-    public void testFetchParametersWithGroupNameRegex() throws IOException, InitializationException {
+    public void testFetchParametersWithGroupNameRegex() throws InitializationException {
         mockSecretClient(mockParameterGroups);
         final Map<PropertyDescriptor, String> properties = new HashMap<>();
         properties.put(AzureKeyVaultSecretsParameterProvider.GROUP_NAME_PATTERN, "MySecret");
@@ -169,44 +151,43 @@ public class TestAzureKeyVaultSecretsParameterProvider {
             for (final Parameter parameter : group.getParameters()) {
                 final String parameterName = parameter.getDescriptor().getName();
                 final String parameterValue = parameter.getValue();
-                final KeyVaultSecret secret = mock(KeyVaultSecret.class);
-                when(secret.getName()).thenReturn(parameterName);
-                when(secret.getValue()).thenReturn(parameterValue);
 
-                final SecretProperties secretProperties = mock(SecretProperties.class);
-                when(secret.getProperties()).thenReturn(secretProperties);
+                final SecretProperties secretProperties = new SecretProperties();
+                SecretPropertiesHelper.setName(secretProperties, parameterName);
+                secretProperties.setTags(
+                        Map.of(AzureKeyVaultSecretsParameterProvider.GROUP_NAME_TAG, group.getGroupName())
+                );
+                secretProperties.setEnabled(true);
 
-                final Map<String, String> tags = new HashMap<>();
-                tags.put(AzureKeyVaultSecretsParameterProvider.GROUP_NAME_TAG, group.getGroupName());
-                when(secretProperties.getTags()).thenReturn(tags);
+                final KeyVaultSecret secret = new KeyVaultSecret(parameterName, parameterValue);
+                secret.setProperties(secretProperties);
 
-                when(secretProperties.getName()).thenReturn(parameterName);
-                when(secretProperties.getVersion()).thenReturn(null);
-                when(secretProperties.isEnabled()).thenReturn(true);
                 when(secretClient.getSecret(eq(parameterName), any())).thenReturn(secret);
 
                 secretPropertiesList.add(secretProperties);
             }
 
         }
 
+        mockListPropertiesOfSecrets(secretPropertiesList);
+    }
+
+    private void mockListPropertiesOfSecrets(final List<SecretProperties> secretPropertiesList) {
         final PagedIterable<SecretProperties> mockIterable = mock(PagedIterable.class);
-        when(secretClient.listPropertiesOfSecrets()).thenReturn(mockIterable);
         when(mockIterable.iterator()).thenReturn(secretPropertiesList.iterator());
+        when(secretClient.listPropertiesOfSecrets()).thenReturn(mockIterable);
     }
 
-    private List<ParameterGroup> runProviderTest(final int expectedCount,
-            final ConfigVerificationResult.Outcome expectedOutcome)
-            throws IOException, InitializationException {
-        final Map<PropertyDescriptor, String> properties = new HashMap<>();
-        properties.put(AzureKeyVaultSecretsParameterProvider.GROUP_NAME_PATTERN, ".*");
-        return runProviderTestWithProperties(expectedCount, expectedOutcome, properties);
+    private void runProviderTest(final int expectedCount, final ConfigVerificationResult.Outcome expectedOutcome)
+            throws InitializationException {
+        runProviderTestWithProperties(
+                expectedCount, expectedOutcome,
+                Map.of(AzureKeyVaultSecretsParameterProvider.GROUP_NAME_PATTERN, ".*")
+        );
     }
 
-    private List<ParameterGroup> runProviderTestWithProperties(final int expectedCount,
-            final ConfigVerificationResult.Outcome expectedOutcome,
-            final Map<PropertyDescriptor, String> properties)
-            throws InitializationException, IOException {
+    private void runProviderTestWithProperties(final int expectedCount,
+            final ConfigVerificationResult.Outcome expectedOutcome,
+            final Map<PropertyDescriptor, String> properties)
+            throws InitializationException {
         final MockParameterProviderInitializationContext initContext = new MockParameterProviderInitializationContext("id", "name",
                 new MockComponentLog("providerId", parameterProvider));
         parameterProvider.initialize(initContext);
@@ -219,9 +200,9 @@ public class TestAzureKeyVaultSecretsParameterProvider {
             assertThrows(RuntimeException.class, () -> parameterProvider.fetchParameters(mockConfigurationContext));
         } else {
             parameterGroups = parameterProvider.fetchParameters(mockConfigurationContext);
-            final int parameterCount = (int) parameterGroups.stream()
-                    .flatMap(group -> group.getParameters().stream())
-                    .count();
+            final int parameterCount = parameterGroups.stream()
+                    .mapToInt(group -> group.getParameters().size())
+                    .sum();
             assertEquals(expectedCount, parameterCount);
         }
 
@@ -229,9 +210,7 @@ public class TestAzureKeyVaultSecretsParameterProvider {
         final List<ConfigVerificationResult> results = ((VerifiableParameterProvider) parameterProvider).verify(mockConfigurationContext, initContext.getLogger());
 
         assertEquals(1, results.size());
-        assertEquals(expectedOutcome, results.get(0).getOutcome());
-
-        return parameterGroups;
+        assertEquals(expectedOutcome, results.getFirst().getOutcome());
     }
 
     private static Parameter parameter(final String name, final String value) {
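Two details in the test rewrite above are worth noting. First, the nested loops that built identical disabled SecretProperties stubs become Stream.generate(...).limit(n).toList(). Second, results.get(0) becomes results.getFirst(), the SequencedCollection accessor added in Java 21. A compact sketch of both (the stub type is illustrative):

    import java.util.List;
    import java.util.stream.Stream;

    class GenerateDemo {
        public static void main(String[] args) {
            int count = 3;
            // produce `count` identical stubs without an explicit loop
            List<StringBuilder> stubs = Stream.generate(StringBuilder::new)
                    .limit(count)
                    .toList();
            System.out.println(stubs.size());     // 3
            System.out.println(stubs.getFirst()); // Java 21 replacement for get(0)
        }
    }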
@@ -33,10 +33,8 @@ import org.apache.nifi.processors.azure.storage.utils.BlobServiceClientFactory;
 import org.apache.nifi.services.azure.storage.AzureStorageCredentialsDetails_v12;
 import org.apache.nifi.services.azure.storage.AzureStorageCredentialsService_v12;
 
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Supplier;
@@ -80,10 +78,7 @@ public abstract class AbstractAzureBlobProcessor_v12 extends AbstractProcessor {
             .description("Unsuccessful operations will be transferred to the failure relationship.")
             .build();
 
-    private static final Set<Relationship> RELATIONSHIPS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
-            REL_SUCCESS,
-            REL_FAILURE
-    )));
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS, REL_FAILURE);
 
     private volatile BlobServiceClientFactory clientFactory;
 
@@ -112,9 +107,7 @@ public abstract class AbstractAzureBlobProcessor_v12 extends AbstractProcessor {
         final AzureStorageCredentialsService_v12 credentialsService = context.getProperty(storageCredentialsServiceProperty).asControllerService(AzureStorageCredentialsService_v12.class);
         final AzureStorageCredentialsDetails_v12 credentialsDetails = credentialsService.getCredentialsDetails(attributes);
 
-        final BlobServiceClient storageClient = clientFactory.getStorageClient(credentialsDetails);
-
-        return storageClient;
+        return clientFactory.getStorageClient(credentialsDetails);
     }
 
     protected Map<String, String> createBlobAttributesMap(BlobClient blobClient) {
@@ -132,7 +125,7 @@ public abstract class AbstractAzureBlobProcessor_v12 extends AbstractProcessor {
     }
 
     protected void applyBlobMetadata(Map<String, String> attributes, BlobClient blobClient) {
-        Supplier<BlobProperties> props = new Supplier() {
+        Supplier<BlobProperties> props = new Supplier<>() {
             BlobProperties properties;
             public BlobProperties get() {
                 if (properties == null) {
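The last hunk swaps new Supplier() for new Supplier<>(): since Java 9 the diamond operator works with anonymous inner classes, so the lazy blob-properties holder keeps full generic type checking instead of falling back to a raw type. A sketch of the same memoizing shape:

    import java.util.function.Supplier;

    class DiamondDemo {
        public static void main(String[] args) {
            Supplier<String> lazy = new Supplier<>() { // diamond on an anonymous class, Java 9+
                private String cached;

                @Override
                public String get() {
                    if (cached == null) {
                        cached = "computed once"; // stand-in for an expensive lookup
                    }
                    return cached;
                }
            };
            System.out.println(lazy.get());
        }
    }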
@@ -37,9 +37,7 @@ import org.apache.nifi.processors.azure.storage.utils.DataLakeServiceClientFacto
 import org.apache.nifi.services.azure.storage.ADLSCredentialsDetails;
 import org.apache.nifi.services.azure.storage.ADLSCredentialsService;
 
-import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
@@ -91,10 +89,7 @@ public abstract class AbstractAzureDataLakeStorageProcessor extends AbstractProc
             .description("Files that could not be written to Azure storage for some reason are transferred to this relationship")
             .build();
 
-    private static final Set<Relationship> RELATIONSHIPS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
-            REL_SUCCESS,
-            REL_FAILURE
-    )));
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS, REL_FAILURE);
 
     public static final String TEMP_FILE_DIRECTORY = "_nifitempdirectory";
 
@@ -119,12 +114,9 @@ public abstract class AbstractAzureDataLakeStorageProcessor extends AbstractProc
         final Map<String, String> attributes = flowFile != null ? flowFile.getAttributes() : Collections.emptyMap();
 
         final ADLSCredentialsService credentialsService = context.getProperty(ADLS_CREDENTIALS_SERVICE).asControllerService(ADLSCredentialsService.class);
 
         final ADLSCredentialsDetails credentialsDetails = credentialsService.getCredentialsDetails(attributes);
 
-        final DataLakeServiceClient storageClient = clientFactory.getStorageClient(credentialsDetails);
-
-        return storageClient;
+        return clientFactory.getStorageClient(credentialsDetails);
     }
 
     public static String evaluateFileSystemProperty(ProcessContext context, FlowFile flowFile) {
@@ -38,20 +38,19 @@ import org.apache.nifi.util.StringUtils;
 
 import javax.crypto.spec.SecretKeySpec;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Optional;
 
 public interface ClientSideEncryptionSupport {
-    List<KeyOperation> KEY_OPERATIONS = Arrays.asList(KeyOperation.WRAP_KEY, KeyOperation.UNWRAP_KEY);
+    List<KeyOperation> KEY_OPERATIONS = List.of(KeyOperation.WRAP_KEY, KeyOperation.UNWRAP_KEY);
 
     PropertyDescriptor CSE_KEY_TYPE = new PropertyDescriptor.Builder()
             .name("Client-Side Encryption Key Type")
             .displayName("Client-Side Encryption Key Type")
             .required(true)
             .allowableValues(ClientSideEncryptionMethod.class)
-            .defaultValue(ClientSideEncryptionMethod.NONE.getValue())
+            .defaultValue(ClientSideEncryptionMethod.NONE)
             .description("Specifies the key type to use for client-side encryption.")
             .build();
 
@@ -77,8 +76,7 @@ public interface ClientSideEncryptionSupport {
 
     default Collection<ValidationResult> validateClientSideEncryptionProperties(ValidationContext validationContext) {
         final List<ValidationResult> validationResults = new ArrayList<>();
-        final String cseKeyTypeValue = validationContext.getProperty(CSE_KEY_TYPE).getValue();
-        final ClientSideEncryptionMethod cseKeyType = ClientSideEncryptionMethod.valueOf(cseKeyTypeValue);
+        final ClientSideEncryptionMethod cseKeyType = validationContext.getProperty(CSE_KEY_TYPE).asDescribedValue(ClientSideEncryptionMethod.class);
         final String cseKeyId = validationContext.getProperty(CSE_KEY_ID).getValue();
         final String cseLocalKey = validationContext.getProperty(CSE_LOCAL_KEY).getValue();
         if (cseKeyType != ClientSideEncryptionMethod.NONE && StringUtils.isBlank(cseKeyId)) {
@@ -116,8 +114,7 @@ public interface ClientSideEncryptionSupport {
     }
 
     default boolean isClientSideEncryptionEnabled(PropertyContext context) {
-        final String cseKeyTypeValue = context.getProperty(CSE_KEY_TYPE).getValue();
-        final ClientSideEncryptionMethod cseKeyType = ClientSideEncryptionMethod.valueOf(cseKeyTypeValue);
+        final ClientSideEncryptionMethod cseKeyType = context.getProperty(CSE_KEY_TYPE).asDescribedValue(ClientSideEncryptionMethod.class);
         return cseKeyType != ClientSideEncryptionMethod.NONE;
     }
 
@@ -144,18 +141,14 @@ public interface ClientSideEncryptionSupport {
         final int keySize256 = 32;
         final int keySize384 = 48;
         final int keySize512 = 64;
-        switch (keyBytes.length) {
-            case keySize128:
-                return Optional.of(KeyWrapAlgorithm.A128KW.toString());
-            case keySize192:
-                return Optional.of(KeyWrapAlgorithm.A192KW.toString());
-            case keySize256:
-            case keySize384:
-            case keySize512:
-                // Default to longest allowed key length for wrap
-                return Optional.of(KeyWrapAlgorithm.A256KW.toString());
-            default:
-                return Optional.empty();
-        }
+
+        return switch (keyBytes.length) {
+            case keySize128 -> Optional.of(KeyWrapAlgorithm.A128KW.toString());
+            case keySize192 -> Optional.of(KeyWrapAlgorithm.A192KW.toString());
+            case keySize256, keySize384, keySize512 ->
+                // Default to the longest allowed key length for wrap
+                Optional.of(KeyWrapAlgorithm.A256KW.toString());
+            default -> Optional.empty();
+        };
     }
 }
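The key-wrap hunk converts a switch statement into a switch expression: the construct yields a value, arrow labels cannot fall through, and one arm covers several constant labels at once (keySize256, keySize384, keySize512). A runnable reduction of the same shape (the 16- and 24-byte values are inferred from the 128- and 192-bit key sizes):

    class KeyWrapDemo {
        static String wrapAlgorithm(int keyLengthBytes) {
            final int keySize128 = 16;
            final int keySize192 = 24;
            final int keySize256 = 32;
            // constant local variables are valid case labels, as in the diff
            return switch (keyLengthBytes) {
                case keySize128 -> "A128KW";
                case keySize192 -> "A192KW";
                case keySize256 -> "A256KW";
                default -> "unsupported";
            };
        }

        public static void main(String[] args) {
            System.out.println(wrapAlgorithm(32)); // A256KW
        }
    }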
@@ -27,12 +27,6 @@ import com.azure.cosmos.CosmosException;
 import com.azure.cosmos.models.CosmosContainerProperties;
 import com.azure.cosmos.models.CosmosContainerResponse;
 import com.azure.cosmos.models.CosmosDatabaseResponse;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
 import org.apache.nifi.annotation.lifecycle.OnScheduled;
 import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.PropertyDescriptor;
@@ -45,6 +39,10 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.services.azure.cosmos.AzureCosmosDBConnectionService;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
 public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
 
     static final Relationship REL_SUCCESS = new Relationship.Builder()
@@ -57,11 +55,6 @@ public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
             .description("All FlowFiles that cannot be written to Cosmos DB are routed to this relationship")
             .build();
 
-    static final Relationship REL_ORIGINAL = new Relationship.Builder()
-            .name("original")
-            .description("All input FlowFiles that are part of a successful are routed to this relationship")
-            .build();
-
     static final PropertyDescriptor CONNECTION_SERVICE = new PropertyDescriptor.Builder()
             .name("azure-cosmos-db-connection-service")
             .displayName("Cosmos DB Connection Service")
@@ -94,28 +87,15 @@ public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
             .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
             .build();
 
-    static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
-            .name("charactor-set")
-            .displayName("Charactor Set")
-            .description("The Character Set in which the data is encoded")
-            .required(false)
-            .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
-            .defaultValue("UTF-8")
-            .build();
-
-    static final List<PropertyDescriptor> descriptors;
-
-    static {
-        List<PropertyDescriptor> _temp = new ArrayList<>();
-        _temp.add(CONNECTION_SERVICE);
-        _temp.add(AzureCosmosDBUtils.URI);
-        _temp.add(AzureCosmosDBUtils.DB_ACCESS_KEY);
-        _temp.add(AzureCosmosDBUtils.CONSISTENCY);
-        _temp.add(DATABASE_NAME);
-        _temp.add(CONTAINER_ID);
-        _temp.add(PARTITION_KEY);
-        descriptors = Collections.unmodifiableList(_temp);
-    }
+    static final List<PropertyDescriptor> descriptors = List.of(
+            CONNECTION_SERVICE,
+            AzureCosmosDBUtils.URI,
+            AzureCosmosDBUtils.DB_ACCESS_KEY,
+            AzureCosmosDBUtils.CONSISTENCY,
+            DATABASE_NAME,
+            CONTAINER_ID,
+            PARTITION_KEY
+    );
 
     private CosmosClient cosmosClient;
     private CosmosContainer container;
@@ -132,43 +112,24 @@ public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
             final String uri = context.getProperty(AzureCosmosDBUtils.URI).getValue();
             final String accessKey = context.getProperty(AzureCosmosDBUtils.DB_ACCESS_KEY).getValue();
             final String selectedConsistency = context.getProperty(AzureCosmosDBUtils.CONSISTENCY).getValue();
-            final ConsistencyLevel clevel;
-            switch (selectedConsistency) {
-                case AzureCosmosDBUtils.CONSISTENCY_STRONG:
-                    clevel = ConsistencyLevel.STRONG;
-                    break;
-                case AzureCosmosDBUtils.CONSISTENCY_CONSISTENT_PREFIX:
-                    clevel = ConsistencyLevel.CONSISTENT_PREFIX;
-                    break;
-                case AzureCosmosDBUtils.CONSISTENCY_SESSION:
-                    clevel = ConsistencyLevel.SESSION;
-                    break;
-                case AzureCosmosDBUtils.CONSISTENCY_BOUNDED_STALENESS:
-                    clevel = ConsistencyLevel.BOUNDED_STALENESS;
-                    break;
-                case AzureCosmosDBUtils.CONSISTENCY_EVENTUAL:
-                    clevel = ConsistencyLevel.EVENTUAL;
-                    break;
-                default:
-                    clevel = ConsistencyLevel.SESSION;
-            }
+            final ConsistencyLevel consistencyLevel = AzureCosmosDBUtils.determineConsistencyLevel(selectedConsistency);
             if (cosmosClient != null) {
                 onStopped();
             }
             if (logger.isDebugEnabled()) {
                 logger.debug("Creating CosmosClient");
             }
-            createCosmosClient(uri, accessKey, clevel);
+            createCosmosClient(uri, accessKey, consistencyLevel);
         }
         getCosmosDocumentContainer(context);
         doPostActionOnSchedule(context);
     }
 
-    protected void createCosmosClient(final String uri, final String accessKey, final ConsistencyLevel clevel) {
+    protected void createCosmosClient(final String uri, final String accessKey, final ConsistencyLevel consistencyLevel) {
         this.cosmosClient = new CosmosClientBuilder()
                 .endpoint(uri)
                 .key(accessKey)
-                .consistencyLevel(clevel)
+                .consistencyLevel(consistencyLevel)
                 .buildClient();
     }
 
@@ -194,7 +155,7 @@ public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
     public final void onStopped() {
         final ComponentLog logger = getLogger();
         if (connectionService == null && cosmosClient != null) {
-            // close client only when cosmoclient is created in Processor.
+            // close client only when cosmoClient is created in Processor.
            if(logger.isDebugEnabled()) {
                 logger.debug("Closing CosmosClient");
             }
@@ -235,7 +196,7 @@ public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
 
     @Override
     protected Collection<ValidationResult> customValidate(ValidationContext context) {
-        List<ValidationResult> retVal = new ArrayList<>();
+        List<ValidationResult> validationResults = new ArrayList<>();
 
         boolean connectionServiceIsSet = context.getProperty(CONNECTION_SERVICE).isSet();
         boolean uriIsSet = context.getProperty(AzureCosmosDBUtils.URI).isSet();
@@ -252,7 +213,7 @@ public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
                 AzureCosmosDBUtils.URI.getDisplayName(),
                 AzureCosmosDBUtils.DB_ACCESS_KEY.getDisplayName()
             );
-            retVal.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
+            validationResults.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
         } else if (!connectionServiceIsSet && (!uriIsSet || !accessKeyIsSet)) {
             // If connection Service is not set, Both of the Processor variable URI and accessKey
             // should be set.
@@ -261,21 +222,21 @@ public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
                 AzureCosmosDBUtils.URI.getDisplayName(),
                 AzureCosmosDBUtils.DB_ACCESS_KEY.getDisplayName()
             );
-            retVal.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
+            validationResults.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
         }
         if (!databaseIsSet) {
             final String msg = AbstractAzureCosmosDBProcessor.DATABASE_NAME.getDisplayName() + " must be set.";
-            retVal.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
+            validationResults.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
         }
         if (!collectionIsSet) {
             final String msg = AbstractAzureCosmosDBProcessor.CONTAINER_ID.getDisplayName() + " must be set.";
-            retVal.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
+            validationResults.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
         }
         if (!partitionIsSet) {
             final String msg = AbstractAzureCosmosDBProcessor.PARTITION_KEY.getDisplayName() + " must be set.";
-            retVal.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
+            validationResults.add(new ValidationResult.Builder().valid(false).explanation(msg).build());
         }
-        return retVal;
+        return validationResults;
     }
 
     protected CosmosClient getCosmosClient() {
@@ -18,6 +18,7 @@
  */
 package org.apache.nifi.processors.azure.cosmos.document;
 
+import com.azure.cosmos.ConsistencyLevel;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.processor.util.StandardValidators;
 
@@ -60,4 +61,15 @@ public final class AzureCosmosDBUtils {
             CONSISTENCY_CONSISTENT_PREFIX, CONSISTENCY_EVENTUAL)
             .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
             .build();
+
+    public static ConsistencyLevel determineConsistencyLevel(final String consistency) {
+        return switch (consistency) {
+            case CONSISTENCY_STRONG -> ConsistencyLevel.STRONG;
+            case CONSISTENCY_CONSISTENT_PREFIX -> ConsistencyLevel.CONSISTENT_PREFIX;
+            case CONSISTENCY_BOUNDED_STALENESS -> ConsistencyLevel.BOUNDED_STALENESS;
+            case CONSISTENCY_EVENTUAL -> ConsistencyLevel.EVENTUAL;
+            case CONSISTENCY_SESSION -> ConsistencyLevel.SESSION;
+            default -> ConsistencyLevel.SESSION;
+        };
+    }
 }
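determineConsistencyLevel gives the string-to-ConsistencyLevel mapping a single home; AbstractAzureCosmosDBProcessor (above) previously inlined the same mapping as a twenty-line switch statement. A dependency-free sketch of the shape, with a local enum standing in for the Azure type and illustrative label strings:

    enum Consistency { STRONG, BOUNDED_STALENESS, SESSION, CONSISTENT_PREFIX, EVENTUAL }

    class ConsistencyDemo {
        static Consistency fromName(String name) {
            return switch (name) {
                case "STRONG" -> Consistency.STRONG;
                case "BOUNDED_STALENESS" -> Consistency.BOUNDED_STALENESS;
                case "CONSISTENT_PREFIX" -> Consistency.CONSISTENT_PREFIX;
                case "EVENTUAL" -> Consistency.EVENTUAL;
                default -> Consistency.SESSION; // SESSION doubles as the fallback, as in the commit
            };
        }

        public static void main(String[] args) {
            System.out.println(fromName("STRONG")); // STRONG
            System.out.println(fromName("bogus"));  // SESSION
        }
    }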
@@ -47,14 +47,13 @@ import org.apache.nifi.serialization.record.util.DataTypeUtils;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
+import java.util.stream.Stream;
 
-@Tags({ "azure", "cosmos", "insert", "record", "put" })
+@Tags({"azure", "cosmos", "insert", "record", "put"})
 @InputRequirement(Requirement.INPUT_REQUIRED)
 @CapabilityDescription("This processor is a record-aware processor for inserting data into Cosmos DB with Core SQL API. It uses a configured record reader and " +
         "schema to read an incoming record set from the body of a Flowfile and then inserts those records into " +
@@ -67,49 +66,37 @@ public class PutAzureCosmosDBRecord extends AbstractAzureCosmosDBProcessor {
     static final AllowableValue UPSERT_CONFLICT = new AllowableValue("UPSERT", "Upsert", "Conflicting records will be upserted, and FlowFile will not be routed to failure");
 
     static final PropertyDescriptor RECORD_READER_FACTORY = new PropertyDescriptor.Builder()
-        .name("record-reader")
-        .displayName("Record Reader")
-        .description("Specifies the Controller Service to use for parsing incoming data and determining the data's schema")
-        .identifiesControllerService(RecordReaderFactory.class)
-        .required(true)
-        .build();
+            .name("record-reader")
+            .displayName("Record Reader")
+            .description("Specifies the Controller Service to use for parsing incoming data and determining the data's schema")
+            .identifiesControllerService(RecordReaderFactory.class)
+            .required(true)
+            .build();
 
     static final PropertyDescriptor INSERT_BATCH_SIZE = new PropertyDescriptor.Builder()
-        .name("insert-batch-size")
-        .displayName("Insert Batch Size")
-        .description("The number of records to group together for one single insert operation against Cosmos DB")
-        .defaultValue("20")
-        .required(false)
-        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
-        .build();
+            .name("insert-batch-size")
+            .displayName("Insert Batch Size")
+            .description("The number of records to group together for one single insert operation against Cosmos DB")
+            .defaultValue("20")
+            .required(false)
+            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
+            .build();
 
     static final PropertyDescriptor CONFLICT_HANDLE_STRATEGY = new PropertyDescriptor.Builder()
-        .name("azure-cosmos-db-conflict-handling-strategy")
-        .displayName("Cosmos DB Conflict Handling Strategy")
-        .description("Choose whether to ignore or upsert when conflict error occurs during insertion")
-        .required(false)
-        .allowableValues(IGNORE_CONFLICT, UPSERT_CONFLICT)
-        .defaultValue(IGNORE_CONFLICT.getValue())
-        .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
-        .build();
+            .name("azure-cosmos-db-conflict-handling-strategy")
+            .displayName("Cosmos DB Conflict Handling Strategy")
+            .description("Choose whether to ignore or upsert when conflict error occurs during insertion")
+            .required(false)
+            .allowableValues(IGNORE_CONFLICT, UPSERT_CONFLICT)
+            .defaultValue(IGNORE_CONFLICT)
+            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
+            .build();
 
-
-    private final static Set<Relationship> relationships;
-    private final static List<PropertyDescriptor> propertyDescriptors;
-
-    static {
-        List<PropertyDescriptor> _propertyDescriptors = new ArrayList<>();
-        _propertyDescriptors.addAll(descriptors);
-        _propertyDescriptors.add(RECORD_READER_FACTORY);
-        _propertyDescriptors.add(INSERT_BATCH_SIZE);
-        _propertyDescriptors.add(CONFLICT_HANDLE_STRATEGY);
-        propertyDescriptors = Collections.unmodifiableList(_propertyDescriptors);
-
-        final Set<Relationship> _relationships = new HashSet<>();
-        _relationships.add(REL_SUCCESS);
-        _relationships.add(REL_FAILURE);
-        relationships = Collections.unmodifiableSet(_relationships);
-    }
+    private final static Set<Relationship> relationships = Set.of(REL_SUCCESS, REL_FAILURE);
+    private final static List<PropertyDescriptor> propertyDescriptors = Stream.concat(
+            descriptors.stream(),
+            Stream.of(RECORD_READER_FACTORY, INSERT_BATCH_SIZE, CONFLICT_HANDLE_STRATEGY)
+    ).toList();
 
     @Override
     public Set<Relationship> getRelationships() {
@@ -121,19 +108,19 @@ public class PutAzureCosmosDBRecord extends AbstractAzureCosmosDBProcessor {
         return propertyDescriptors;
     }
 
-    protected void bulkInsert(final List<Map<String, Object>> records) throws CosmosException{
+    protected void bulkInsert(final List<Map<String, Object>> records) throws CosmosException {
         // In the future, this method will be replaced by calling createItems API
         // for example, this.container.createItems(records);
         // currently, no createItems API available in Azure Cosmos Java SDK
         final ComponentLog logger = getLogger();
         final CosmosContainer container = getContainer();
-        for (Map<String, Object> record : records){
+        for (Map<String, Object> record : records) {
             try {
                 container.createItem(record);
             } catch (ConflictException e) {
                 // insert with unique id is expected. In case conflict occurs, use the selected strategy.
                 // By default, it will ignore.
-                if (conflictHandlingStrategy != null && conflictHandlingStrategy.equals(UPSERT_CONFLICT.getValue())){
+                if (conflictHandlingStrategy != null && conflictHandlingStrategy.equals(UPSERT_CONFLICT.getValue())) {
                     container.upsertItem(record);
                 } else {
                     if (logger.isDebugEnabled()) {
@@ -168,7 +155,7 @@ public class PutAzureCosmosDBRecord extends AbstractAzureCosmosDBProcessor {
         while ((record = reader.nextRecord()) != null) {
             // Convert each Record to HashMap
             Map<String, Object> contentMap = (Map<String, Object>) DataTypeUtils.convertRecordFieldtoObject(record, RecordFieldType.RECORD.getRecordDataType(schema));
-            if(contentMap.containsKey("id")) {
+            if (contentMap.containsKey("id")) {
                 final Object idObj = contentMap.get("id");
                 final String idStr = (idObj == null) ? "" : String.valueOf(idObj);
                 if (idObj == null || StringUtils.isBlank(idStr)) {
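The propertyDescriptors rewrite above uses Stream.concat to splice the base class descriptor list with the subclass additions into one unmodifiable list, replacing the addAll-based static block. A sketch with plain strings:

    import java.util.List;
    import java.util.stream.Stream;

    class ConcatDemo {
        public static void main(String[] args) {
            List<String> inherited = List.of("connection-service", "database-name");
            List<String> all = Stream.concat(
                    inherited.stream(),
                    Stream.of("record-reader", "insert-batch-size")
            ).toList(); // unmodifiable, ordered: base descriptors first
            System.out.println(all);
        }
    }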
@@ -32,12 +32,10 @@ import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.services.azure.data.explorer.KustoQueryService;
 import org.apache.nifi.services.azure.data.explorer.KustoQueryResponse;
+import org.apache.nifi.services.azure.data.explorer.KustoQueryService;
 
 import java.io.InputStream;
-import java.util.Arrays;
-import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
 
@@ -92,9 +90,9 @@ public class QueryAzureDataExplorer extends AbstractProcessor {
 
     protected static final String APPLICATION_JSON = "application/json";
 
-    private static final Set<Relationship> RELATIONSHIPS = new LinkedHashSet<>(Arrays.asList(SUCCESS, FAILURE));
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(SUCCESS, FAILURE);
 
-    private static final List<PropertyDescriptor> DESCRIPTORS = Arrays.asList(KUSTO_QUERY_SERVICE, DATABASE_NAME, QUERY);
+    private static final List<PropertyDescriptor> DESCRIPTORS = List.of(KUSTO_QUERY_SERVICE, DATABASE_NAME, QUERY);
 
     private volatile KustoQueryService service;
 
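One subtlety in the RELATIONSHIPS change above: LinkedHashSet preserves insertion order, while Set.of makes no ordering guarantee. For a two-element relationship set the order is inconsequential, which is presumably why the commit accepts the trade for conciseness:

    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.Set;

    class SetOrderDemo {
        public static void main(String[] args) {
            Set<String> ordered = new LinkedHashSet<>(Arrays.asList("success", "failure"));
            Set<String> unordered = Set.of("success", "failure");
            System.out.println(ordered);   // always [success, failure]
            System.out.println(unordered); // iteration order unspecified
        }
    }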
@@ -70,6 +70,7 @@ import org.apache.nifi.serialization.WriteResult;
 import org.apache.nifi.serialization.record.Record;
 import org.apache.nifi.serialization.record.RecordSchema;
 import org.apache.nifi.shared.azure.eventhubs.AzureEventHubComponent;
+import org.apache.nifi.shared.azure.eventhubs.AzureEventHubTransportType;
 import org.apache.nifi.util.StopWatch;
 import org.apache.nifi.util.StringUtils;
 
@@ -79,11 +80,8 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.time.Duration;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -186,7 +184,7 @@ public class ConsumeAzureEventHub extends AbstractSessionFactoryProcessor implem
             .description("Specify where to start receiving messages if offset is not stored in Azure Storage.")
             .required(true)
             .allowableValues(INITIAL_OFFSET_START_OF_STREAM, INITIAL_OFFSET_END_OF_STREAM)
-            .defaultValue(INITIAL_OFFSET_END_OF_STREAM.getValue())
+            .defaultValue(INITIAL_OFFSET_END_OF_STREAM)
             .build();
     static final PropertyDescriptor PREFETCH_COUNT = new PropertyDescriptor.Builder()
             .name("event-hub-prefetch-count")
@@ -275,40 +273,28 @@ public class ConsumeAzureEventHub extends AbstractSessionFactoryProcessor implem
             " the contents of the message will be routed to this Relationship as its own individual FlowFile.")
             .build();
 
-    private static final Set<Relationship> RELATIONSHIPS;
-    private static final Set<Relationship> RECORD_RELATIONSHIPS;
-    private static final List<PropertyDescriptor> PROPERTIES;
-
-    static {
-        PROPERTIES = Collections.unmodifiableList(Arrays.asList(
-                NAMESPACE,
-                EVENT_HUB_NAME,
-                SERVICE_BUS_ENDPOINT,
-                TRANSPORT_TYPE,
-                ACCESS_POLICY_NAME,
-                POLICY_PRIMARY_KEY,
-                USE_MANAGED_IDENTITY,
-                CONSUMER_GROUP,
-                RECORD_READER,
-                RECORD_WRITER,
-                INITIAL_OFFSET,
-                PREFETCH_COUNT,
-                BATCH_SIZE,
-                RECEIVE_TIMEOUT,
-                STORAGE_ACCOUNT_NAME,
-                STORAGE_ACCOUNT_KEY,
-                STORAGE_SAS_TOKEN,
-                STORAGE_CONTAINER_NAME,
-                PROXY_CONFIGURATION_SERVICE
-        ));
-
-        Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        RELATIONSHIPS = Collections.unmodifiableSet(relationships);
-
-        relationships.add(REL_PARSE_FAILURE);
-        RECORD_RELATIONSHIPS = Collections.unmodifiableSet(relationships);
-    }
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);
+    private static final Set<Relationship> RECORD_RELATIONSHIPS = Set.of(REL_SUCCESS, REL_PARSE_FAILURE);
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(NAMESPACE,
+            EVENT_HUB_NAME,
+            SERVICE_BUS_ENDPOINT,
+            TRANSPORT_TYPE,
+            ACCESS_POLICY_NAME,
+            POLICY_PRIMARY_KEY,
+            USE_MANAGED_IDENTITY,
+            CONSUMER_GROUP,
+            RECORD_READER,
+            RECORD_WRITER,
+            INITIAL_OFFSET,
+            PREFETCH_COUNT,
+            BATCH_SIZE,
+            RECEIVE_TIMEOUT,
+            STORAGE_ACCOUNT_NAME,
+            STORAGE_ACCOUNT_KEY,
+            STORAGE_SAS_TOKEN,
+            STORAGE_CONTAINER_NAME,
+            PROXY_CONFIGURATION_SERVICE
+    );
 
     private volatile ProcessSessionFactory processSessionFactory;
     private volatile EventProcessorClient eventProcessorClient;
@@ -435,7 +421,7 @@ public class ConsumeAzureEventHub extends AbstractSessionFactoryProcessor implem
         final Long receiveTimeout = context.getProperty(RECEIVE_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS);
         final Duration maxWaitTime = Duration.ofMillis(receiveTimeout);
         final Integer maxBatchSize = context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger();
-        final AmqpTransportType transportType = AmqpTransportType.fromString(context.getProperty(TRANSPORT_TYPE).getValue());
+        final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asDescribedValue(AzureEventHubTransportType.class).asAmqpTransportType();
 
         final EventProcessorClientBuilder eventProcessorClientBuilder = new EventProcessorClientBuilder()
                 .transportType(transportType)
@@ -522,8 +508,7 @@ public class ConsumeAzureEventHub extends AbstractSessionFactoryProcessor implem
         final PartitionContext partitionContext = errorContext.getPartitionContext();
         final Throwable throwable = errorContext.getThrowable();
 
-        if (throwable instanceof AmqpException) {
-            final AmqpException amqpException = (AmqpException) throwable;
+        if (throwable instanceof AmqpException amqpException) {
             if (amqpException.getErrorCondition() == AmqpErrorCondition.LINK_STOLEN) {
                 getLogger().info("Partition was stolen by another consumer instance from the consumer group. Namespace [{}] Event Hub [{}] Consumer Group [{}] Partition [{}]. {}",
                         partitionContext.getFullyQualifiedNamespace(),
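The error-handler hunk adopts pattern matching for instanceof (Java 16+): the type test and the cast merge into a single binding. A minimal before-and-after:

    class PatternDemo {
        public static void main(String[] args) {
            Object value = "hello";

            // before: test, then cast
            if (value instanceof String) {
                String s = (String) value;
                System.out.println(s.length());
            }

            // after: the pattern variable s is bound only when the test succeeds
            if (value instanceof String s) {
                System.out.println(s.length());
            }
        }
    }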
@@ -54,12 +54,12 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.azure.eventhub.utils.AzureEventHubUtils;
 import org.apache.nifi.scheduling.ExecutionNode;
 import org.apache.nifi.shared.azure.eventhubs.AzureEventHubComponent;
+import org.apache.nifi.shared.azure.eventhubs.AzureEventHubTransportType;
 import org.apache.nifi.util.StopWatch;
 
 import java.time.Duration;
 import java.time.Instant;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -159,26 +159,21 @@ public class GetAzureEventHub extends AbstractProcessor implements AzureEventHub
             .description("Any FlowFile that is successfully received from the event hub will be transferred to this Relationship.")
             .build();
 
-    private final static List<PropertyDescriptor> propertyDescriptors;
-    private final static Set<Relationship> relationships;
-
-    static {
-        propertyDescriptors = List.of(
-                NAMESPACE,
-                EVENT_HUB_NAME,
-                SERVICE_BUS_ENDPOINT,
-                TRANSPORT_TYPE,
-                ACCESS_POLICY,
-                POLICY_PRIMARY_KEY,
-                USE_MANAGED_IDENTITY,
-                CONSUMER_GROUP,
-                ENQUEUE_TIME,
-                RECEIVER_FETCH_SIZE,
-                RECEIVER_FETCH_TIMEOUT,
-                PROXY_CONFIGURATION_SERVICE
-        );
-        relationships = Collections.singleton(REL_SUCCESS);
-    }
+    private final static List<PropertyDescriptor> propertyDescriptors = List.of(
+            NAMESPACE,
+            EVENT_HUB_NAME,
+            SERVICE_BUS_ENDPOINT,
+            TRANSPORT_TYPE,
+            ACCESS_POLICY,
+            POLICY_PRIMARY_KEY,
+            USE_MANAGED_IDENTITY,
+            CONSUMER_GROUP,
+            ENQUEUE_TIME,
+            RECEIVER_FETCH_SIZE,
+            RECEIVER_FETCH_TIMEOUT,
+            PROXY_CONFIGURATION_SERVICE
+    );
+    private final static Set<Relationship> relationships = Set.of(REL_SUCCESS);
 
     private final Map<String, EventPosition> partitionEventPositions = new ConcurrentHashMap<>();
 
@@ -365,7 +360,7 @@ public class GetAzureEventHub extends AbstractProcessor implements AzureEventHub
         final String serviceBusEndpoint = context.getProperty(SERVICE_BUS_ENDPOINT).getValue();
         final boolean useManagedIdentity = context.getProperty(USE_MANAGED_IDENTITY).asBoolean();
         final String fullyQualifiedNamespace = String.format("%s%s", namespace, serviceBusEndpoint);
-        final AmqpTransportType transportType = AmqpTransportType.fromString(context.getProperty(TRANSPORT_TYPE).getValue());
+        final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asDescribedValue(AzureEventHubTransportType.class).asAmqpTransportType();
 
         final EventHubClientBuilder eventHubClientBuilder = new EventHubClientBuilder();
         eventHubClientBuilder.transportType(transportType);
@@ -48,13 +48,13 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.azure.eventhub.utils.AzureEventHubUtils;
 import org.apache.nifi.processors.azure.storage.utils.FlowFileResultCarrier;
 import org.apache.nifi.shared.azure.eventhubs.AzureEventHubComponent;
+import org.apache.nifi.shared.azure.eventhubs.AzureEventHubTransportType;
 import org.apache.nifi.stream.io.StreamUtils;
 import org.apache.nifi.util.StopWatch;

 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

@@ -119,28 +119,19 @@ public class PutAzureEventHub extends AbstractProcessor implements AzureEventHub
             .description("Any FlowFile that could not be sent to the event hub will be transferred to this Relationship.")
             .build();

-    private final static List<PropertyDescriptor> propertyDescriptors;
-    private final static Set<Relationship> relationships;
-
-    static {
-        final List<PropertyDescriptor> configuredDescriptors = new ArrayList<>();
-        configuredDescriptors.add(NAMESPACE);
-        configuredDescriptors.add(EVENT_HUB_NAME);
-        configuredDescriptors.add(SERVICE_BUS_ENDPOINT);
-        configuredDescriptors.add(TRANSPORT_TYPE);
-        configuredDescriptors.add(ACCESS_POLICY);
-        configuredDescriptors.add(POLICY_PRIMARY_KEY);
-        configuredDescriptors.add(USE_MANAGED_IDENTITY);
-        configuredDescriptors.add(PARTITIONING_KEY_ATTRIBUTE_NAME);
-        configuredDescriptors.add(MAX_BATCH_SIZE);
-        configuredDescriptors.add(PROXY_CONFIGURATION_SERVICE);
-        propertyDescriptors = Collections.unmodifiableList(configuredDescriptors);
-
-        final Set<Relationship> configuredRelationships = new HashSet<>();
-        configuredRelationships.add(REL_SUCCESS);
-        configuredRelationships.add(REL_FAILURE);
-        relationships = Collections.unmodifiableSet(configuredRelationships);
-    }
+    private final static List<PropertyDescriptor> propertyDescriptors = List.of(
+            NAMESPACE,
+            EVENT_HUB_NAME,
+            SERVICE_BUS_ENDPOINT,
+            TRANSPORT_TYPE,
+            ACCESS_POLICY,
+            POLICY_PRIMARY_KEY,
+            USE_MANAGED_IDENTITY,
+            PARTITIONING_KEY_ATTRIBUTE_NAME,
+            MAX_BATCH_SIZE,
+            PROXY_CONFIGURATION_SERVICE
+    );
+    private final static Set<Relationship> relationships = Set.of(REL_SUCCESS, REL_FAILURE);

     private EventHubProducerClient eventHubProducerClient;

@@ -196,7 +187,7 @@ public class PutAzureEventHub extends AbstractProcessor implements AzureEventHub
         final String namespace = context.getProperty(NAMESPACE).getValue();
         final String serviceBusEndpoint = context.getProperty(SERVICE_BUS_ENDPOINT).getValue();
         final String eventHubName = context.getProperty(EVENT_HUB_NAME).getValue();
-        final AmqpTransportType transportType = AmqpTransportType.fromString(context.getProperty(TRANSPORT_TYPE).getValue());
+        final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asDescribedValue(AzureEventHubTransportType.class).asAmqpTransportType();

         try {
             final EventHubClientBuilder eventHubClientBuilder = new EventHubClientBuilder();

@@ -228,9 +219,9 @@ public class PutAzureEventHub extends AbstractProcessor implements AzureEventHub
     ) {
         try {
             for (final FlowFileResultCarrier<Relationship> flowFileResult : flowFileResults) {
-                final FlowFile flowFile = flowFileResult.getFlowFile();
+                final FlowFile flowFile = flowFileResult.flowFile();

-                if (flowFileResult.getResult() == REL_SUCCESS) {
+                if (flowFileResult.result() == REL_SUCCESS) {
                     final String namespace = context.getProperty(NAMESPACE).getValue();
                     final String eventHubName = context.getProperty(EVENT_HUB_NAME).getValue();
                     final String serviceBusEndpoint = context.getProperty(SERVICE_BUS_ENDPOINT).getValue();

@@ -238,7 +229,7 @@ public class PutAzureEventHub extends AbstractProcessor implements AzureEventHub
                     session.getProvenanceReporter().send(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
                     session.transfer(flowFile, REL_SUCCESS);
                 } else {
-                    final Throwable processException = flowFileResult.getException();
+                    final Throwable processException = flowFileResult.exception();
                     getLogger().error("Send failed {}", flowFile, processException);
                     session.transfer(session.penalize(flowFile), REL_FAILURE);
                 }

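The static initializer blocks above collapse into `List.of` and `Set.of` factory calls. A minimal, self-contained illustration of why these factories (Java 9+) suit fixed descriptor sets like the ones in this commit:

    import java.util.List;
    import java.util.Set;

    class ImmutableCollectionsDemo {
        public static void main(String[] args) {
            // List.of / Set.of return unmodifiable collections directly,
            // replacing the Collections.unmodifiableList(new ArrayList<>(...)) idiom.
            List<String> properties = List.of("namespace", "event-hub-name");
            Set<String> relationships = Set.of("success", "failure");

            try {
                properties.add("extra"); // throws: the collection is truly immutable
            } catch (UnsupportedOperationException e) {
                System.out.println("List.of result is unmodifiable");
            }

            // Note: Set.of rejects duplicates and both factories reject null,
            // which is why they only fit fixed, non-null constant sets.
            System.out.println(properties + " " + relationships);
        }
    }
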
@@ -57,7 +57,7 @@ public final class AzureEventHubUtils {
             .name("use-managed-identity")
             .displayName("Use Azure Managed Identity")
             .description("Choose whether or not to use the managed identity of Azure VM/VMSS")
-            .required(false).defaultValue("false").allowableValues("true", "false")
+            .required(true).defaultValue("false").allowableValues("true", "false")
             .addValidator(StandardValidators.BOOLEAN_VALIDATOR).build();

     public static final PropertyDescriptor SERVICE_BUS_ENDPOINT = new PropertyDescriptor.Builder()

@@ -67,14 +67,14 @@ public final class AzureEventHubUtils {
             .expressionLanguageSupported(ExpressionLanguageScope.NONE)
             .allowableValues(AzureEventHubUtils.AZURE_ENDPOINT, AzureEventHubUtils.AZURE_CHINA_ENDPOINT,
                     AzureEventHubUtils.AZURE_GERMANY_ENDPOINT, AzureEventHubUtils.AZURE_US_GOV_ENDPOINT)
-            .defaultValue(AzureEventHubUtils.AZURE_ENDPOINT.getValue())
+            .defaultValue(AzureEventHubUtils.AZURE_ENDPOINT)
             .required(true)
             .build();

     public static List<ValidationResult> customValidate(PropertyDescriptor accessPolicyDescriptor,
                                                         PropertyDescriptor policyKeyDescriptor,
                                                         ValidationContext context) {
-        List<ValidationResult> retVal = new ArrayList<>();
+        List<ValidationResult> validationResults = new ArrayList<>();

         boolean accessPolicyIsSet = context.getProperty(accessPolicyDescriptor).isSet();
         boolean policyKeyIsSet = context.getProperty(policyKeyDescriptor).isSet();

@@ -87,7 +87,7 @@ public final class AzureEventHubUtils {
                 accessPolicyDescriptor.getDisplayName(),
                 POLICY_PRIMARY_KEY.getDisplayName()
             );
-            retVal.add(new ValidationResult.Builder().subject("Credentials config").valid(false).explanation(msg).build());
+            validationResults.add(new ValidationResult.Builder().subject("Credentials config").valid(false).explanation(msg).build());
         } else if (!useManagedIdentity && (!accessPolicyIsSet || !policyKeyIsSet)) {
             final String msg = String.format(
                 "either('%s') or (%s with '%s') must be set",

@@ -95,10 +95,10 @@ public final class AzureEventHubUtils {
                 accessPolicyDescriptor.getDisplayName(),
                 POLICY_PRIMARY_KEY.getDisplayName()
             );
-            retVal.add(new ValidationResult.Builder().subject("Credentials config").valid(false).explanation(msg).build());
+            validationResults.add(new ValidationResult.Builder().subject("Credentials config").valid(false).explanation(msg).build());
         }
-        ProxyConfiguration.validateProxySpec(context, retVal, AzureEventHubComponent.PROXY_SPECS);
-        return retVal;
+        ProxyConfiguration.validateProxySpec(context, validationResults, AzureEventHubComponent.PROXY_SPECS);
+        return validationResults;
     }

     public static Map<String, String> getApplicationProperties(final Map<String, Object> eventProperties) {

@@ -200,7 +200,7 @@ public class CopyAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 {
         ).orElse(sourceBlobName);

         final boolean createContainer = context.getProperty(AzureStorageUtils.CREATE_CONTAINER).asBoolean();
-        final AzureStorageConflictResolutionStrategy conflictResolution = AzureStorageConflictResolutionStrategy.valueOf(context.getProperty(AzureStorageUtils.CONFLICT_RESOLUTION).getValue());
+        final AzureStorageConflictResolutionStrategy conflictResolution = context.getProperty(AzureStorageUtils.CONFLICT_RESOLUTION).asDescribedValue(AzureStorageConflictResolutionStrategy.class);

         final long startNanos = System.nanoTime();
         try {

@@ -35,8 +35,6 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.azure.AbstractAzureBlobProcessor_v12;
 import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;

-import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.TimeUnit;

@@ -51,9 +49,7 @@ import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR
 public class DeleteAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 {

     public static final AllowableValue DELETE_SNAPSHOTS_NONE = new AllowableValue("NONE", "None", "Delete the blob only.");
-
     public static final AllowableValue DELETE_SNAPSHOTS_ALSO = new AllowableValue(DeleteSnapshotsOptionType.INCLUDE.name(), "Include Snapshots", "Delete the blob and its snapshots.");
-
     public static final AllowableValue DELETE_SNAPSHOTS_ONLY = new AllowableValue(DeleteSnapshotsOptionType.ONLY.name(), "Delete Snapshots Only", "Delete only the blob's snapshots.");

     public static final PropertyDescriptor CONTAINER = new PropertyDescriptor.Builder()

@@ -72,17 +68,17 @@ public class DeleteAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 {
             .description("Specifies the snapshot deletion options to be used when deleting a blob.")
             .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
             .allowableValues(DELETE_SNAPSHOTS_NONE, DELETE_SNAPSHOTS_ALSO, DELETE_SNAPSHOTS_ONLY)
-            .defaultValue(DELETE_SNAPSHOTS_NONE.getValue())
+            .defaultValue(DELETE_SNAPSHOTS_NONE)
             .required(true)
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             STORAGE_CREDENTIALS_SERVICE,
             CONTAINER,
             BLOB_NAME,
             DELETE_SNAPSHOTS_OPTION,
             AzureStorageUtils.PROXY_CONFIGURATION_SERVICE
-    ));
+    );

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {

@@ -135,16 +131,10 @@ public class DeleteAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 {
     }

     private String getProvenanceMessage(DeleteSnapshotsOptionType deleteSnapshotsOptionType) {
-        if (deleteSnapshotsOptionType == null) {
-            return "Blob deleted";
-        }
-        switch (deleteSnapshotsOptionType) {
-            case INCLUDE:
-                return "Blob deleted along with its snapshots";
-            case ONLY:
-                return "Blob's snapshots deleted";
-            default:
-                throw new IllegalArgumentException("Unhandled DeleteSnapshotsOptionType: " + deleteSnapshotsOptionType);
-        }
+        return switch (deleteSnapshotsOptionType) {
+            case null -> "Blob deleted";
+            case INCLUDE -> "Blob deleted along with its snapshots";
+            case ONLY -> "Blob's snapshots deleted";
+        };
     }
 }

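The `getProvenanceMessage` rewrite relies on switch expressions accepting `case null` (Java 21), which folds the leading null guard into the switch and makes the unreachable `default` branch unnecessary. A standalone sketch of the same construct, using a local stand-in for the Azure SDK's `DeleteSnapshotsOptionType` enum so it compiles on its own:

    // Requires Java 21: 'case null' inside a switch expression.
    enum SnapshotsOption { INCLUDE, ONLY } // local stand-in, not the Azure SDK type

    class SwitchNullDemo {
        static String provenanceMessage(SnapshotsOption option) {
            // Covering null plus every enum constant makes the switch exhaustive,
            // so no default branch (and no IllegalArgumentException) is needed.
            return switch (option) {
                case null -> "Blob deleted";
                case INCLUDE -> "Blob deleted along with its snapshots";
                case ONLY -> "Blob's snapshots deleted";
            };
        }

        public static void main(String[] args) {
            System.out.println(provenanceMessage(null));                   // Blob deleted
            System.out.println(provenanceMessage(SnapshotsOption.INCLUDE)); // with snapshots
        }
    }
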
@@ -39,8 +39,6 @@ import org.apache.nifi.processors.azure.AbstractAzureDataLakeStorageProcessor;
 import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;

 import java.time.Duration;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;

 import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR_NAME_FILENAME;

@@ -59,8 +57,8 @@ public class DeleteAzureDataLakeStorage extends AbstractAzureDataLakeStorageProc
             .displayName("Filesystem Object Type")
             .description("They type of the file system object to be deleted. It can be either folder or file.")
             .allowableValues(FS_TYPE_FILE, FS_TYPE_DIRECTORY)
+            .defaultValue(FS_TYPE_FILE)
             .required(true)
-            .defaultValue(FS_TYPE_FILE.toString())
             .build();

     public static final PropertyDescriptor FILE = new PropertyDescriptor.Builder()

@@ -73,14 +71,14 @@ public class DeleteAzureDataLakeStorage extends AbstractAzureDataLakeStorageProc
             .dependsOn(FILESYSTEM_OBJECT_TYPE, FS_TYPE_FILE)
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             ADLS_CREDENTIALS_SERVICE,
             FILESYSTEM,
             FILESYSTEM_OBJECT_TYPE,
             DIRECTORY,
             FILE,
             AzureStorageUtils.PROXY_CONFIGURATION_SERVICE
-    ));
+    );

     @Override
     public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {

@@ -20,13 +20,6 @@ import com.azure.storage.blob.BlobClient;
 import com.azure.storage.blob.BlobContainerClient;
 import com.azure.storage.blob.BlobServiceClient;
 import com.azure.storage.blob.models.BlobRange;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;

@@ -49,6 +42,13 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.azure.AbstractAzureBlobProcessor_v12;
 import org.apache.nifi.processors.azure.ClientSideEncryptionSupport;
 import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
 import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_DESCRIPTION_BLOBNAME;
 import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_DESCRIPTION_BLOBTYPE;
 import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_DESCRIPTION_CONTAINER;

@@ -139,7 +139,7 @@ public class FetchAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 im
             .required(false)
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             STORAGE_CREDENTIALS_SERVICE,
             CONTAINER,
             BLOB_NAME,

@@ -149,7 +149,7 @@ public class FetchAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 im
             CSE_KEY_TYPE,
             CSE_KEY_ID,
             CSE_LOCAL_KEY
-    ));
+    );

     @Override
     protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {

@@ -24,10 +24,6 @@ import com.azure.storage.file.datalake.DataLakeServiceClient;
 import com.azure.storage.file.datalake.models.DataLakeStorageException;
 import com.azure.storage.file.datalake.models.DownloadRetryOptions;
 import com.azure.storage.file.datalake.models.FileRange;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.WritesAttribute;

@@ -48,6 +44,9 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.azure.AbstractAzureDataLakeStorageProcessor;
 import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;

+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
 @Tags({"azure", "microsoft", "cloud", "storage", "adlsgen2", "datalake"})
 @SeeAlso({PutAzureDataLakeStorage.class, DeleteAzureDataLakeStorage.class, ListAzureDataLakeStorage.class})
 @CapabilityDescription("Fetch the specified file from Azure Data Lake Storage")

@@ -118,7 +117,7 @@ public class FetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageProce
             .defaultValue("0")
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             ADLS_CREDENTIALS_SERVICE,
             FILESYSTEM,
             DIRECTORY,

@@ -127,7 +126,7 @@ public class FetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageProce
             RANGE_LENGTH,
             NUM_RETRIES,
             AzureStorageUtils.PROXY_CONFIGURATION_SERVICE
-    ));
+    );

     @Override
     public List<PropertyDescriptor> getSupportedPropertyDescriptors() {

@@ -162,7 +161,6 @@ public class FetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageProce
                 throw new ProcessException(FILE.getDisplayName() + " (" + fileName + ") points to a directory. Full path: " + fileClient.getFilePath());
             }

-
             flowFile = session.write(flowFile, os -> fileClient.readWithResponse(os, fileRange, retryOptions, null, false, null, Context.NONE));
             session.getProvenanceReporter().modifyContent(flowFile);
             session.transfer(flowFile, REL_SUCCESS);

@@ -54,10 +54,8 @@ import org.apache.nifi.services.azure.storage.AzureStorageCredentialsService_v12

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

@@ -136,7 +134,7 @@ public class ListAzureBlobStorage_v12 extends AbstractListAzureProcessor<BlobInf
             .dependsOn(LISTING_STRATEGY, BY_ENTITIES)
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             STORAGE_CREDENTIALS_SERVICE,
             CONTAINER,
             BLOB_NAME_PREFIX,

@@ -150,7 +148,7 @@ public class ListAzureBlobStorage_v12 extends AbstractListAzureProcessor<BlobInf
             MIN_SIZE,
             MAX_SIZE,
             AzureStorageUtils.PROXY_CONFIGURATION_SERVICE
-    ));
+    );

     private volatile BlobServiceClientFactory clientFactory;

@@ -228,10 +226,7 @@ public class ListAzureBlobStorage_v12 extends AbstractListAzureProcessor<BlobInf
         final ListBlobsOptions options = new ListBlobsOptions()
                 .setPrefix(prefix);

-        final Iterator<BlobItem> result = containerClient.listBlobs(options, null).iterator();
-
-        while (result.hasNext()) {
-            final BlobItem blob = result.next();
+        for (BlobItem blob : containerClient.listBlobs(options, null)) {
             final BlobItemProperties properties = blob.getProperties();

             if (isFileInfoMatchesWithAgeAndSize(context, minimumTimestamp, properties.getLastModified().toInstant().toEpochMilli(), properties.getContentLength())) {

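The listing loop above drops the explicit `Iterator` in favor of for-each, which works because the client's paged result is an `Iterable`. A self-contained sketch of the equivalence, with a plain `List` standing in for the paged response:

    import java.util.Iterator;
    import java.util.List;

    class ForEachOverIterableDemo {
        public static void main(String[] args) {
            // Stand-in for the SDK's paged result; any Iterable works with for-each.
            Iterable<String> blobs = List.of("a.txt", "b.txt", "c.txt");

            // Old style: explicit Iterator bookkeeping.
            Iterator<String> it = blobs.iterator();
            while (it.hasNext()) {
                System.out.println("old-style: " + it.next());
            }

            // New style from the diff: for-each compiles to the same iterator
            // calls but removes the mutable cursor variable.
            for (String blob : blobs) {
                System.out.println("for-each: " + blob);
            }
        }
    }
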
@@ -51,16 +51,13 @@ import org.apache.nifi.services.azure.storage.ADLSCredentialsDetails;
 import org.apache.nifi.services.azure.storage.ADLSCredentialsService;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;

 import static org.apache.nifi.processor.util.list.ListedEntityTracker.INITIAL_LISTING_TARGET;
 import static org.apache.nifi.processor.util.list.ListedEntityTracker.TRACKING_STATE_CACHE;

@@ -144,7 +141,7 @@ public class ListAzureDataLakeStorage extends AbstractListAzureProcessor<ADLSFil
             .defaultValue(Boolean.FALSE.toString())
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             ADLS_CREDENTIALS_SERVICE,
             FILESYSTEM,
             DIRECTORY,

@@ -161,16 +158,18 @@ public class ListAzureDataLakeStorage extends AbstractListAzureProcessor<ADLSFil
             MAX_AGE,
             MIN_SIZE,
             MAX_SIZE,
-            AzureStorageUtils.PROXY_CONFIGURATION_SERVICE));
+            AzureStorageUtils.PROXY_CONFIGURATION_SERVICE
+    );

-    private static final Set<PropertyDescriptor> LISTING_RESET_PROPERTIES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
+    private static final Set<PropertyDescriptor> LISTING_RESET_PROPERTIES = Set.of(
             ADLS_CREDENTIALS_SERVICE,
             FILESYSTEM,
             DIRECTORY,
             RECURSE_SUBDIRECTORIES,
             FILE_FILTER,
             PATH_FILTER,
-            LISTING_STRATEGY)));
+            LISTING_STRATEGY
+    );

     private volatile Pattern filePattern;
     private volatile Pattern pathPattern;

@@ -288,7 +287,7 @@ public class ListAzureDataLakeStorage extends AbstractListAzureProcessor<ADLSFil
             final boolean includeTempFiles = context.getProperty(INCLUDE_TEMPORARY_FILES).asBoolean();
             final long minimumTimestamp = minTimestamp == null ? 0 : minTimestamp;

-            final List<ADLSFileInfo> listing = fileSystemClient.listPaths(options, null).stream()
+            return fileSystemClient.listPaths(options, null).stream()
                     .filter(pathItem -> !pathItem.isDirectory())
                     .filter(pathItem -> includeTempFiles || !pathItem.getName().contains(TEMP_FILE_DIRECTORY))
                     .filter(pathItem -> isFileInfoMatchesWithAgeAndSize(context, minimumTimestamp, pathItem.getLastModified().toInstant().toEpochMilli(), pathItem.getContentLength()))

@@ -299,11 +298,10 @@ public class ListAzureDataLakeStorage extends AbstractListAzureProcessor<ADLSFil
                     .lastModified(pathItem.getLastModified().toInstant().toEpochMilli())
                     .etag(pathItem.getETag())
                     .build())
-                    .filter(fileInfo -> applyFilters && (filePattern == null || filePattern.matcher(fileInfo.getFilename()).matches()))
-                    .filter(fileInfo -> applyFilters && (pathPattern == null || pathPattern.matcher(RegExUtils.removeFirst(fileInfo.getDirectory(), baseDirectoryPattern)).matches()))
-                    .collect(Collectors.toList());
-
-            return listing;
+                    .filter(fileInfo -> applyFilters)
+                    .filter(fileInfo -> filePattern == null || filePattern.matcher(fileInfo.getFilename()).matches())
+                    .filter(fileInfo -> pathPattern == null || pathPattern.matcher(RegExUtils.removeFirst(fileInfo.getDirectory(), baseDirectoryPattern)).matches())
+                    .toList();
         } catch (final Exception e) {
             getLogger().error("Failed to list directory on Azure Data Lake Storage", e);
             throw new IOException(ExceptionUtils.getRootCause(e));

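The listing pipeline above now ends in `Stream.toList()` instead of `collect(Collectors.toList())`, returns the stream result directly, and splits the compound filters into single-purpose ones. The difference is not only brevity: `toList()` (Java 16+) guarantees an unmodifiable result, while `Collectors.toList()` makes no guarantee either way, as this short sketch demonstrates:

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    class ToListDemo {
        public static void main(String[] args) {
            // Collectors.toList() documents no guarantee about mutability.
            List<Integer> collected = Stream.of(1, 2, 3).collect(Collectors.toList());
            collected.add(4); // typically works (an ArrayList today), but unspecified

            // Stream.toList() is shorter and always returns an unmodifiable list.
            List<Integer> unmodifiable = Stream.of(1, 2, 3)
                    .filter(n -> n > 1)      // chained single-purpose filters,
                    .filter(n -> n % 2 == 1) // as in the rewritten pipeline above
                    .toList();

            try {
                unmodifiable.add(4);
            } catch (UnsupportedOperationException e) {
                System.out.println("Stream.toList() result is unmodifiable");
            }
            System.out.println(collected + " " + unmodifiable);
        }
    }
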
@@ -38,8 +38,6 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.azure.AbstractAzureDataLakeStorageProcessor;
 import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;

-import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -78,7 +76,6 @@ public class MoveAzureDataLakeStorage extends AbstractAzureDataLakeStorageProces
     public static final String REPLACE_RESOLUTION = "replace";
     public static final String IGNORE_RESOLUTION = "ignore";

-
     public static final PropertyDescriptor CONFLICT_RESOLUTION = new PropertyDescriptor.Builder()
             .name("conflict-resolution-strategy")
             .displayName("Conflict Resolution Strategy")

@@ -125,7 +122,7 @@ public class MoveAzureDataLakeStorage extends AbstractAzureDataLakeStorageProces
             .addValidator(new DirectoryValidator("Destination Directory"))
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             ADLS_CREDENTIALS_SERVICE,
             SOURCE_FILESYSTEM,
             SOURCE_DIRECTORY,

@@ -134,7 +131,7 @@ public class MoveAzureDataLakeStorage extends AbstractAzureDataLakeStorageProces
             FILE,
             CONFLICT_RESOLUTION,
             AzureStorageUtils.PROXY_CONFIGURATION_SERVICE
-    ));
+    );

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {

@@ -50,9 +50,7 @@ import org.apache.nifi.services.azure.storage.AzureStorageConflictResolutionStra

 import java.io.InputStream;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -83,8 +81,8 @@ import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR
 import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_MIME_TYPE;
 import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_PRIMARY_URI;
 import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_TIMESTAMP;
-import static org.apache.nifi.processors.transfer.ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE;
 import static org.apache.nifi.processors.transfer.ResourceTransferProperties.FILE_RESOURCE_SERVICE;
+import static org.apache.nifi.processors.transfer.ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE;
 import static org.apache.nifi.processors.transfer.ResourceTransferUtils.getFileResource;

 @Tags({"azure", "microsoft", "cloud", "storage", "blob"})

@@ -104,7 +102,8 @@ import static org.apache.nifi.processors.transfer.ResourceTransferUtils.getFileR
     @WritesAttribute(attribute = ATTR_NAME_ERROR_CODE, description = ATTR_DESCRIPTION_ERROR_CODE),
     @WritesAttribute(attribute = ATTR_NAME_IGNORED, description = ATTR_DESCRIPTION_IGNORED)})
 public class PutAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 implements ClientSideEncryptionSupport {
-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             STORAGE_CREDENTIALS_SERVICE,
             AzureStorageUtils.CONTAINER,
             AzureStorageUtils.CREATE_CONTAINER,

@@ -116,7 +115,7 @@ public class PutAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 impl
             CSE_KEY_TYPE,
             CSE_KEY_ID,
             CSE_LOCAL_KEY
-    ));
+    );

     @Override
     protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {

@@ -139,7 +138,7 @@ public class PutAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 impl
         final String containerName = context.getProperty(AzureStorageUtils.CONTAINER).evaluateAttributeExpressions(flowFile).getValue();
         final boolean createContainer = context.getProperty(AzureStorageUtils.CREATE_CONTAINER).asBoolean();
         final String blobName = context.getProperty(BLOB_NAME).evaluateAttributeExpressions(flowFile).getValue();
-        final AzureStorageConflictResolutionStrategy conflictResolution = AzureStorageConflictResolutionStrategy.valueOf(context.getProperty(AzureStorageUtils.CONFLICT_RESOLUTION).getValue());
+        final AzureStorageConflictResolutionStrategy conflictResolution = context.getProperty(AzureStorageUtils.CONFLICT_RESOLUTION).asDescribedValue(AzureStorageConflictResolutionStrategy.class);
         final ResourceTransferSource resourceTransferSource = ResourceTransferSource.valueOf(context.getProperty(RESOURCE_TRANSFER_SOURCE).getValue());

         long startNanos = System.nanoTime();

@@ -217,5 +216,4 @@ public class PutAzureBlobStorage_v12 extends AbstractAzureBlobProcessor_v12 impl
             attributes.put(ATTR_NAME_LANG, null);
             attributes.put(ATTR_NAME_MIME_TYPE, APPLICATION_OCTET_STREAM);
         }
-
     }
 }

@@ -44,8 +44,6 @@ import org.apache.nifi.util.StringUtils;

 import java.io.BufferedInputStream;
 import java.io.InputStream;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -63,8 +61,8 @@ import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR
 import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR_NAME_FILESYSTEM;
 import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR_NAME_LENGTH;
 import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR_NAME_PRIMARY_URI;
-import static org.apache.nifi.processors.transfer.ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE;
 import static org.apache.nifi.processors.transfer.ResourceTransferProperties.FILE_RESOURCE_SERVICE;
+import static org.apache.nifi.processors.transfer.ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE;
 import static org.apache.nifi.processors.transfer.ResourceTransferUtils.getFileResource;

 @Tags({"azure", "microsoft", "cloud", "storage", "adlsgen2", "datalake"})

@@ -104,7 +102,7 @@ public class PutAzureDataLakeStorage extends AbstractAzureDataLakeStorageProcess
             .addValidator(new DirectoryValidator("Base Temporary Path"))
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             ADLS_CREDENTIALS_SERVICE,
             FILESYSTEM,
             DIRECTORY,

@@ -114,7 +112,7 @@ public class PutAzureDataLakeStorage extends AbstractAzureDataLakeStorageProcess
             RESOURCE_TRANSFER_SOURCE,
             FILE_RESOURCE_SERVICE,
             AzureStorageUtils.PROXY_CONFIGURATION_SERVICE
-    ));
+    );

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {

@@ -226,7 +224,7 @@ public class PutAzureDataLakeStorage extends AbstractAzureDataLakeStorageProcess
      * This method serves as a "commit" for the upload process. Upon upload, a 0-byte file is created, then the payload is appended to it.
      * Because of that, a work-in-progress file is available for readers before the upload is complete. It is not an efficient approach in
      * case of conflicts because FlowFiles are uploaded unnecessarily, but it is a calculated risk because consistency is more important.
-     *
+     * <p>
      * Visible for testing
      *
      * @param sourceFileClient client of the temporary file

@@ -43,10 +43,8 @@ import org.apache.nifi.services.azure.storage.AzureStorageCredentialsService_v12
 import reactor.core.publisher.Mono;

 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

@@ -100,7 +98,7 @@ public abstract class AbstractAzureQueueStorage_v12 extends AbstractProcessor {
             .description("Unsuccessful operations will be transferred to the failure relationship.")
             .build();

-    private static final Set<Relationship> RELATIONSHIPS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(REL_SUCCESS, REL_FAILURE)));
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS, REL_FAILURE);

     static final String URI_ATTRIBUTE = "azure.queue.uri";
     static final String INSERTION_TIME_ATTRIBUTE = "azure.queue.insertionTime";

@@ -40,9 +40,7 @@ import org.apache.nifi.proxy.ProxyConfiguration;
 import org.apache.nifi.proxy.ProxySpec;

 import java.time.Duration;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;

@@ -98,20 +96,17 @@ public class GetAzureQueueStorage_v12 extends AbstractAzureQueueStorage_v12 {
             .build();

     private static final ProxySpec[] PROXY_SPECS = {ProxySpec.HTTP, ProxySpec.SOCKS};
-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(
-            Arrays.asList(
-                    QUEUE_NAME,
-                    ENDPOINT_SUFFIX,
-                    STORAGE_CREDENTIALS_SERVICE,
-                    AUTO_DELETE,
-                    MESSAGE_BATCH_SIZE,
-                    VISIBILITY_TIMEOUT,
-                    REQUEST_TIMEOUT,
-                    ProxyConfiguration.createProxyConfigPropertyDescriptor(false, PROXY_SPECS)
-            )
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            QUEUE_NAME,
+            ENDPOINT_SUFFIX,
+            STORAGE_CREDENTIALS_SERVICE,
+            AUTO_DELETE,
+            MESSAGE_BATCH_SIZE,
+            VISIBILITY_TIMEOUT,
+            REQUEST_TIMEOUT,
+            ProxyConfiguration.createProxyConfigPropertyDescriptor(false, PROXY_SPECS)
     );

-    private static final Set<Relationship> RELATIONSHIPS = Collections.singleton(REL_SUCCESS);
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

     // 7 days is the maximum timeout as per https://learn.microsoft.com/en-us/rest/api/storageservices/get-messages
     private static final Duration MAX_VISIBILITY_TIMEOUT = Duration.ofDays(7);

@@ -194,7 +189,7 @@ public class GetAzureQueueStorage_v12 extends AbstractAzureQueueStorage_v12 {
             flowFile = session.write(flowFile, out -> out.write(message.getBody().toString().getBytes()));

             session.transfer(flowFile, REL_SUCCESS);
-            session.getProvenanceReporter().receive(flowFile, queueClient.getQueueUrl().toString());
+            session.getProvenanceReporter().receive(flowFile, queueClient.getQueueUrl());
         }

         if (autoDelete) {

@@ -36,9 +36,7 @@ import org.apache.nifi.proxy.ProxySpec;

 import java.io.ByteArrayOutputStream;
 import java.time.Duration;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.TimeUnit;

@@ -67,16 +65,14 @@ public class PutAzureQueueStorage_v12 extends AbstractAzureQueueStorage_v12 {
             .build();

     private static final ProxySpec[] PROXY_SPECS = {ProxySpec.HTTP, ProxySpec.SOCKS};
-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(
-            Arrays.asList(
-                    QUEUE_NAME,
-                    ENDPOINT_SUFFIX,
-                    STORAGE_CREDENTIALS_SERVICE,
-                    MESSAGE_TIME_TO_LIVE,
-                    VISIBILITY_TIMEOUT,
-                    REQUEST_TIMEOUT,
-                    ProxyConfiguration.createProxyConfigPropertyDescriptor(false, PROXY_SPECS)
-            )
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            QUEUE_NAME,
+            ENDPOINT_SUFFIX,
+            STORAGE_CREDENTIALS_SERVICE,
+            MESSAGE_TIME_TO_LIVE,
+            VISIBILITY_TIMEOUT,
+            REQUEST_TIMEOUT,
+            ProxyConfiguration.createProxyConfigPropertyDescriptor(false, PROXY_SPECS)
     );

     // 7 days is the maximum timeout as per https://learn.microsoft.com/en-us/rest/api/storageservices/get-messages

@@ -26,7 +26,6 @@ import org.apache.nifi.serialization.record.RecordFieldType;
 import org.apache.nifi.serialization.record.RecordSchema;

 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;

@@ -35,7 +34,6 @@ import java.util.Objects;

 public class ADLSFileInfo implements Comparable<ADLSFileInfo>, Serializable, ListableEntity {

-    private static final RecordSchema SCHEMA;
     private static final String FILESYSTEM = "filesystem";
     private static final String FILE_PATH = "filePath";
     private static final String DIRECTORY = "directory";

@@ -44,17 +42,15 @@ public class ADLSFileInfo implements Comparable<ADLSFileInfo>, Serializable, Lis
     private static final String LAST_MODIFIED = "lastModified";
     private static final String ETAG = "etag";

-    static {
-        List<RecordField> recordFields = new ArrayList<>();
-        recordFields.add(new RecordField(FILESYSTEM, RecordFieldType.STRING.getDataType(), false));
-        recordFields.add(new RecordField(FILE_PATH, RecordFieldType.STRING.getDataType(), false));
-        recordFields.add(new RecordField(DIRECTORY, RecordFieldType.STRING.getDataType(), false));
-        recordFields.add(new RecordField(FILENAME, RecordFieldType.STRING.getDataType(), false));
-        recordFields.add(new RecordField(LENGTH, RecordFieldType.LONG.getDataType(), false));
-        recordFields.add(new RecordField(LAST_MODIFIED, RecordFieldType.TIMESTAMP.getDataType(), false));
-        recordFields.add(new RecordField(ETAG, RecordFieldType.STRING.getDataType()));
-        SCHEMA = new SimpleRecordSchema(recordFields);
-    }
+    private static final RecordSchema SCHEMA = new SimpleRecordSchema(List.of(
+            new RecordField(FILESYSTEM, RecordFieldType.STRING.getDataType(), false),
+            new RecordField(FILE_PATH, RecordFieldType.STRING.getDataType(), false),
+            new RecordField(DIRECTORY, RecordFieldType.STRING.getDataType(), false),
+            new RecordField(FILENAME, RecordFieldType.STRING.getDataType(), false),
+            new RecordField(LENGTH, RecordFieldType.LONG.getDataType(), false),
+            new RecordField(LAST_MODIFIED, RecordFieldType.TIMESTAMP.getDataType(), false),
+            new RecordField(ETAG, RecordFieldType.STRING.getDataType())
+    ));

     private static final Comparator<ADLSFileInfo> COMPARATOR = Comparator.comparing(ADLSFileInfo::getFileSystem).thenComparing(ADLSFileInfo::getFilePath);

@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.processors.azure.storage.utils;
-
-/**
- * Enumeration capturing essential information about the various client-side
- * encryption methods supported by Azure
- */
-public enum AzureBlobClientSideEncryptionMethod {
-
-    NONE("None", "The blobs sent to Azure are not encrypted."),
-    SYMMETRIC("Symmetric", "The blobs sent to Azure are encrypted using a symmetric algorithm.");
-
-    private final String cseName;
-    private final String description;
-
-    AzureBlobClientSideEncryptionMethod(String cseName, String description) {
-        this.cseName = cseName;
-        this.description = description;
-    }
-
-    public String getCseName() {
-        return cseName;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    @Override
-    public String toString() {
-        return description;
-    }
-}

@@ -17,10 +17,6 @@
 package org.apache.nifi.processors.azure.storage.utils;

 import com.azure.core.http.ProxyOptions;
-import java.net.InetSocketAddress;
-import java.net.Proxy;
-import java.util.Collection;
-
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;

@@ -33,6 +29,10 @@ import org.apache.nifi.proxy.SocksVersion;
 import org.apache.nifi.services.azure.storage.AzureStorageConflictResolutionStrategy;
 import reactor.netty.http.client.HttpClient;

+import java.net.InetSocketAddress;
+import java.net.Proxy;
+import java.util.Collection;
+
 public final class AzureStorageUtils {
     public static final String STORAGE_ACCOUNT_NAME_PROPERTY_DESCRIPTOR_NAME = "storage-account-name";
     public static final String STORAGE_ACCOUNT_KEY_PROPERTY_DESCRIPTOR_NAME = "storage-account-key";

@@ -128,7 +128,7 @@ public final class AzureStorageUtils {
             .expressionLanguageSupported(ExpressionLanguageScope.NONE)
             .required(true)
             .allowableValues(AzureStorageConflictResolutionStrategy.class)
-            .defaultValue(AzureStorageConflictResolutionStrategy.FAIL_RESOLUTION.getValue())
+            .defaultValue(AzureStorageConflictResolutionStrategy.FAIL_RESOLUTION)
             .description("Specifies whether an existing blob will have its contents replaced upon conflict.")
             .build();

@@ -215,7 +215,6 @@ public final class AzureStorageUtils {
         final ProxyConfiguration proxyConfiguration = ProxyConfiguration.getConfiguration(propertyContext);

         if (proxyConfiguration != ProxyConfiguration.DIRECT_CONFIGURATION) {
-
             final ProxyOptions proxyOptions = new ProxyOptions(
                     getProxyType(proxyConfiguration),
                     new InetSocketAddress(proxyConfiguration.getProxyServerHost(), proxyConfiguration.getProxyServerPort()));

@@ -16,12 +16,6 @@
  */
 package org.apache.nifi.processors.azure.storage.utils;

-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.nifi.processor.util.list.ListableEntity;
 import org.apache.nifi.serialization.SimpleRecordSchema;
 import org.apache.nifi.serialization.record.MapRecord;

@@ -30,10 +24,14 @@ import org.apache.nifi.serialization.record.RecordField;
 import org.apache.nifi.serialization.record.RecordFieldType;
 import org.apache.nifi.serialization.record.RecordSchema;

+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 public class BlobInfo implements Comparable<BlobInfo>, Serializable, ListableEntity {
     private static final long serialVersionUID = 1L;

-    private static final RecordSchema SCHEMA;
     private static final String BLOB_NAME = "blobName";
     private static final String BLOB_TYPE = "blobType";
     private static final String FILENAME = "filename";

@@ -46,22 +44,19 @@ public class BlobInfo implements Comparable<BlobInfo>, Serializable, ListableEnt
     private static final String PRIMARY_URI = "primaryUri";
     private static final String SECONDARY_URI = "secondaryUri";

-    static {
-        final List<RecordField> recordFields = new ArrayList<>();
-        recordFields.add(new RecordField(BLOB_NAME, RecordFieldType.STRING.getDataType(), false));
-        recordFields.add(new RecordField(BLOB_TYPE, RecordFieldType.STRING.getDataType(), false));
-        recordFields.add(new RecordField(FILENAME, RecordFieldType.STRING.getDataType(), false));
-        recordFields.add(new RecordField(CONTAINER_NAME, RecordFieldType.BOOLEAN.getDataType(), false));
-        recordFields.add(new RecordField(LENGTH, RecordFieldType.LONG.getDataType(), false));
-        recordFields.add(new RecordField(LAST_MODIFIED, RecordFieldType.TIMESTAMP.getDataType(), false));
-        recordFields.add(new RecordField(ETAG, RecordFieldType.STRING.getDataType()));
-        recordFields.add(new RecordField(CONTENT_LANGUAGE, RecordFieldType.STRING.getDataType()));
-        recordFields.add(new RecordField(CONTENT_TYPE, RecordFieldType.STRING.getDataType()));
-        recordFields.add(new RecordField(PRIMARY_URI, RecordFieldType.STRING.getDataType()));
-        recordFields.add(new RecordField(SECONDARY_URI, RecordFieldType.STRING.getDataType()));
-        SCHEMA = new SimpleRecordSchema(recordFields);
-    }
-
+    private static final RecordSchema SCHEMA = new SimpleRecordSchema(List.of(
+            new RecordField(BLOB_NAME, RecordFieldType.STRING.getDataType(), false),
+            new RecordField(BLOB_TYPE, RecordFieldType.STRING.getDataType(), false),
+            new RecordField(FILENAME, RecordFieldType.STRING.getDataType(), false),
+            new RecordField(CONTAINER_NAME, RecordFieldType.BOOLEAN.getDataType(), false),
+            new RecordField(LENGTH, RecordFieldType.LONG.getDataType(), false),
+            new RecordField(LAST_MODIFIED, RecordFieldType.TIMESTAMP.getDataType(), false),
+            new RecordField(ETAG, RecordFieldType.STRING.getDataType()),
+            new RecordField(CONTENT_LANGUAGE, RecordFieldType.STRING.getDataType()),
+            new RecordField(CONTENT_TYPE, RecordFieldType.STRING.getDataType()),
+            new RecordField(PRIMARY_URI, RecordFieldType.STRING.getDataType()),
+            new RecordField(SECONDARY_URI, RecordFieldType.STRING.getDataType())
+    ));

     private final String primaryUri;
     private final String secondaryUri;

@@ -18,34 +18,8 @@ package org.apache.nifi.processors.azure.storage.utils;

 import org.apache.nifi.flowfile.FlowFile;

-public class FlowFileResultCarrier<T> {
-
-    final private FlowFile flowFile;
-    final private T result;
-    final private Throwable exception;
-
+public record FlowFileResultCarrier<T>(FlowFile flowFile, T result, Throwable exception) {
     public FlowFileResultCarrier(FlowFile flowFile, T result) {
-        this.flowFile = flowFile;
-        this.result = result;
-        this.exception = null;
+        this(flowFile, result, null);
     }
-
-    public FlowFileResultCarrier(FlowFile flowFile, T result, Throwable exception) {
-        this.flowFile = flowFile;
-        this.result = result;
-        this.exception = exception;
-    }
-
-    public FlowFile getFlowFile() {
-        return flowFile;
-    }
-
-    public T getResult() {
-        return result;
-    }
-
-    public Throwable getException() {
-        return exception;
-    }
-
 }

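Converting `FlowFileResultCarrier` to a record is what drove the earlier `getFlowFile()` to `flowFile()` call-site changes in PutAzureEventHub. A compact sketch of the same conversion, with `String` standing in for the `FlowFile` interface so it runs standalone:

    // Records generate the canonical constructor, component accessors
    // (flowFile(), result(), exception()), equals, hashCode, and toString.
    public record ResultCarrier<T>(String flowFile, T result, Throwable exception) {

        // A secondary constructor must delegate to the canonical one, exactly
        // as the two-argument FlowFileResultCarrier constructor now does.
        public ResultCarrier(String flowFile, T result) {
            this(flowFile, result, null);
        }

        public static void main(String[] args) {
            ResultCarrier<String> ok = new ResultCarrier<>("flowfile-1", "success");
            // Accessor names follow the component names, not JavaBean getters:
            System.out.println(ok.flowFile() + " -> " + ok.result() + ", " + ok.exception());
        }
    }
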
@@ -30,8 +30,6 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;

-import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;

 /**

@@ -55,7 +53,7 @@ public class StandardAzureCredentialsControllerService extends AbstractControlle
             .required(true)
             .sensitive(false)
             .allowableValues(DEFAULT_CREDENTIAL, MANAGED_IDENTITY)
-            .defaultValue(DEFAULT_CREDENTIAL.toString())
+            .defaultValue(DEFAULT_CREDENTIAL)
             .build();

     public static final PropertyDescriptor MANAGED_IDENTITY_CLIENT_ID = new PropertyDescriptor.Builder()

@@ -70,14 +68,9 @@ public class StandardAzureCredentialsControllerService extends AbstractControlle
             .dependsOn(CREDENTIAL_CONFIGURATION_STRATEGY, MANAGED_IDENTITY)
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES;
-
-    static {
-        final List<PropertyDescriptor> props = new ArrayList<>();
-        props.add(CREDENTIAL_CONFIGURATION_STRATEGY);
-        props.add(MANAGED_IDENTITY_CLIENT_ID);
-        PROPERTIES = Collections.unmodifiableList(props);
-    }
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            CREDENTIAL_CONFIGURATION_STRATEGY, MANAGED_IDENTITY_CLIENT_ID
+    );

     private TokenCredential credentials;

@@ -95,9 +88,9 @@ public class StandardAzureCredentialsControllerService extends AbstractControlle
     public void onConfigured(final ConfigurationContext context) {
         final String configurationStrategy = context.getProperty(CREDENTIAL_CONFIGURATION_STRATEGY).getValue();

-        if (DEFAULT_CREDENTIAL.equals(configurationStrategy)) {
+        if (DEFAULT_CREDENTIAL.getValue().equals(configurationStrategy)) {
             credentials = getDefaultAzureCredential();
-        } else if (MANAGED_IDENTITY.equals(configurationStrategy)) {
+        } else if (MANAGED_IDENTITY.getValue().equals(configurationStrategy)) {
             credentials = getManagedIdentityCredential(context);
         } else {
             final String errorMsg = String.format("Configuration Strategy [%s] not recognized", configurationStrategy);

@@ -113,10 +106,9 @@ public class StandardAzureCredentialsControllerService extends AbstractControlle
     private TokenCredential getManagedIdentityCredential(final ConfigurationContext context) {
         final String clientId = context.getProperty(MANAGED_IDENTITY_CLIENT_ID).getValue();

-        final TokenCredential managedIdentityCredential = new ManagedIdentityCredentialBuilder()
+        return new ManagedIdentityCredentialBuilder()
                 .clientId(clientId)
                 .build();
-        return managedIdentityCredential;
     }

     @Override

@@ -17,15 +17,10 @@

 package org.apache.nifi.services.azure.cosmos.document;

-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
 import com.azure.cosmos.ConsistencyLevel;
 import com.azure.cosmos.CosmosClient;
 import com.azure.cosmos.CosmosClientBuilder;
 import com.azure.cosmos.CosmosException;
-
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.annotation.lifecycle.OnEnabled;

@@ -39,6 +34,10 @@ import org.apache.nifi.processors.azure.cosmos.document.AzureCosmosDBUtils;
 import org.apache.nifi.services.azure.cosmos.AzureCosmosDBConnectionService;
 import org.apache.nifi.util.StringUtils;

+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
 @Tags({"azure", "cosmos", "document", "service"})
 @CapabilityDescription(
         "Provides a controller service that configures a connection to Cosmos DB (Core SQL API) " +

@@ -54,37 +53,15 @@ public class AzureCosmosDBClientService extends AbstractControllerService implem
     public void onEnabled(final ConfigurationContext context) {
         this.uri = context.getProperty(AzureCosmosDBUtils.URI).getValue();
         this.accessKey = context.getProperty(AzureCosmosDBUtils.DB_ACCESS_KEY).getValue();
-        final ConsistencyLevel clevel;
         final String selectedConsistency = context.getProperty(AzureCosmosDBUtils.CONSISTENCY).getValue();
-
-        switch(selectedConsistency) {
-            case AzureCosmosDBUtils.CONSISTENCY_STRONG:
-                clevel = ConsistencyLevel.STRONG;
-                break;
-            case AzureCosmosDBUtils.CONSISTENCY_CONSISTENT_PREFIX:
-                clevel = ConsistencyLevel.CONSISTENT_PREFIX;
-                break;
-            case AzureCosmosDBUtils.CONSISTENCY_SESSION:
-                clevel = ConsistencyLevel.SESSION;
-                break;
-            case AzureCosmosDBUtils.CONSISTENCY_BOUNDED_STALENESS:
-                clevel = ConsistencyLevel.BOUNDED_STALENESS;
-                break;
-            case AzureCosmosDBUtils.CONSISTENCY_EVENTUAL:
-                clevel = ConsistencyLevel.EVENTUAL;
-                break;
-            default:
-                clevel = ConsistencyLevel.SESSION;
-        }
-
+        final ConsistencyLevel consistencyLevel = AzureCosmosDBUtils.determineConsistencyLevel(selectedConsistency);
         if (this.cosmosClient != null) {
             onStopped();
         }
-        consistencyLevel = clevel.toString();
-        createCosmosClient(uri, accessKey, clevel);
+        this.consistencyLevel = consistencyLevel.toString();
+        createCosmosClient(uri, accessKey, consistencyLevel);
     }
-

     @OnStopped
     public final void onStopped() {
         if (this.cosmosClient != null) {

@@ -106,13 +83,11 @@ public class AzureCosmosDBClientService extends AbstractControllerService implem
                 .buildClient();
     }

-    static List<PropertyDescriptor> descriptors = new ArrayList<>();
-
-    static {
-        descriptors.add(AzureCosmosDBUtils.URI);
-        descriptors.add(AzureCosmosDBUtils.DB_ACCESS_KEY);
-        descriptors.add(AzureCosmosDBUtils.CONSISTENCY);
-    }
+    static List<PropertyDescriptor> descriptors = List.of(
+            AzureCosmosDBUtils.URI,
+            AzureCosmosDBUtils.DB_ACCESS_KEY,
+            AzureCosmosDBUtils.CONSISTENCY
+    );

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {

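`AzureCosmosDBUtils.determineConsistencyLevel` absorbs the switch deleted above, but its body is not part of this diff. Assuming it mirrors the removed logic (SESSION for both its own case and any unrecognized value), it would look roughly like the sketch below; the enum and case strings here are local stand-ins, not the Azure SDK types or the AzureCosmosDBUtils constants.

    // Local stand-in for com.azure.cosmos.ConsistencyLevel.
    enum ConsistencyChoice { STRONG, BOUNDED_STALENESS, SESSION, CONSISTENT_PREFIX, EVENTUAL }

    final class ConsistencyLevels {
        // Hypothetical helper shape; assumes the same mapping as the removed switch.
        static ConsistencyChoice determineConsistencyLevel(String selected) {
            return switch (selected) {
                case "STRONG" -> ConsistencyChoice.STRONG;
                case "CONSISTENT_PREFIX" -> ConsistencyChoice.CONSISTENT_PREFIX;
                case "BOUNDED_STALENESS" -> ConsistencyChoice.BOUNDED_STALENESS;
                case "EVENTUAL" -> ConsistencyChoice.EVENTUAL;
                default -> ConsistencyChoice.SESSION; // SESSION case and unknown input alike
            };
        }

        public static void main(String[] args) {
            System.out.println(determineConsistencyLevel("STRONG"));   // STRONG
            System.out.println(determineConsistencyLevel("anything")); // SESSION
        }
    }
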
@@ -20,7 +20,6 @@ import org.apache.nifi.components.DescribedValue;

 enum KustoAuthenticationStrategy implements DescribedValue {
     APPLICATION_CREDENTIALS("Application Credentials", "Azure Application Registration with Application Key"),
-
     MANAGED_IDENTITY("Managed Identity", "Azure Managed Identity");

     private final String displayName;


@@ -19,7 +19,6 @@ package org.apache.nifi.services.azure.data.explorer;
import com.microsoft.azure.kusto.data.ClientFactory;
import com.microsoft.azure.kusto.data.StreamingClient;
import com.microsoft.azure.kusto.data.auth.ConnectionStringBuilder;

import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnEnabled;

@@ -32,7 +31,6 @@ import org.apache.nifi.processor.util.StandardValidators;

import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;


@@ -53,7 +51,7 @@ public class StandardKustoQueryService extends AbstractControllerService impleme
.displayName("Authentication Strategy")
.description("Authentication method for access to Azure Data Explorer")
.required(true)
.defaultValue(KustoAuthenticationStrategy.MANAGED_IDENTITY.getValue())
.defaultValue(KustoAuthenticationStrategy.MANAGED_IDENTITY)
.allowableValues(KustoAuthenticationStrategy.class)
.build();


@@ -71,7 +69,7 @@ public class StandardKustoQueryService extends AbstractControllerService impleme
.description("Azure Data Explorer Application Tenant Identifier for Authentication")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.dependsOn(AUTHENTICATION_STRATEGY, KustoAuthenticationStrategy.APPLICATION_CREDENTIALS.getValue())
.dependsOn(AUTHENTICATION_STRATEGY, KustoAuthenticationStrategy.APPLICATION_CREDENTIALS)
.build();

public static final PropertyDescriptor APPLICATION_KEY = new PropertyDescriptor.Builder()

@@ -81,10 +79,10 @@ public class StandardKustoQueryService extends AbstractControllerService impleme
.required(true)
.sensitive(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.dependsOn(AUTHENTICATION_STRATEGY, KustoAuthenticationStrategy.APPLICATION_CREDENTIALS.getValue())
.dependsOn(AUTHENTICATION_STRATEGY, KustoAuthenticationStrategy.APPLICATION_CREDENTIALS)
.build();

private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = Arrays.asList(
private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = List.of(
CLUSTER_URI,
AUTHENTICATION_STRATEGY,
APPLICATION_CLIENT_ID,

@@ -144,20 +142,18 @@ public class StandardKustoQueryService extends AbstractControllerService impleme

@SuppressWarnings("unchecked")
private ConnectionStringBuilder getConnectionStringBuilder(final ConfigurationContext context) {
final ConnectionStringBuilder builder;

final String clusterUrl = context.getProperty(CLUSTER_URI).getValue();
final String clientId = context.getProperty(APPLICATION_CLIENT_ID).getValue();
final KustoAuthenticationStrategy kustoAuthenticationStrategy = context.getProperty(AUTHENTICATION_STRATEGY).asDescribedValue(KustoAuthenticationStrategy.class);

final KustoAuthenticationStrategy kustoAuthenticationStrategy = KustoAuthenticationStrategy.valueOf(context.getProperty(AUTHENTICATION_STRATEGY).getValue());

if (KustoAuthenticationStrategy.MANAGED_IDENTITY == kustoAuthenticationStrategy) {
builder = ConnectionStringBuilder.createWithAadManagedIdentity(clusterUrl, clientId);
} else {
final String applicationKey = context.getProperty(APPLICATION_KEY).getValue();
final String tenantId = context.getProperty(APPLICATION_TENANT_ID).getValue();
builder = ConnectionStringBuilder.createWithAadApplicationCredentials(clusterUrl, clientId, applicationKey, tenantId);
}
final ConnectionStringBuilder builder = switch (kustoAuthenticationStrategy) {
case APPLICATION_CREDENTIALS -> {
final String applicationKey = context.getProperty(APPLICATION_KEY).getValue();
final String tenantId = context.getProperty(APPLICATION_TENANT_ID).getValue();
yield ConnectionStringBuilder.createWithAadApplicationCredentials(clusterUrl, clientId, applicationKey, tenantId);
}
case MANAGED_IDENTITY -> ConnectionStringBuilder.createWithAadManagedIdentity(clusterUrl, clientId);
};

final String vendor = System.getProperty("java.vendor");
final String version = System.getProperty("java.version");
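
For context, the two lookup styles swapped in the last hunk resolve the same constant whenever getValue() mirrors name(); an illustrative fragment, assuming a ConfigurationContext named context:

// Illustrative fragment, not from the commit.
final PropertyValue property = context.getProperty(AUTHENTICATION_STRATEGY);
final KustoAuthenticationStrategy legacy = KustoAuthenticationStrategy.valueOf(property.getValue());
final KustoAuthenticationStrategy typed = property.asDescribedValue(KustoAuthenticationStrategy.class);
assert legacy == typed;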

@@ -20,7 +20,6 @@ import com.azure.core.amqp.AmqpTransportType;
import com.azure.core.credential.AzureNamedKeyCredential;
import com.azure.identity.DefaultAzureCredential;
import com.azure.identity.DefaultAzureCredentialBuilder;

import com.azure.messaging.eventhubs.EventData;
import com.azure.messaging.eventhubs.EventDataBatch;
import com.azure.messaging.eventhubs.EventHubClientBuilder;

@@ -46,11 +45,10 @@ import org.apache.nifi.serialization.WriteResult;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordSet;
import org.apache.nifi.shared.azure.eventhubs.AzureEventHubComponent;
import org.apache.nifi.shared.azure.eventhubs.AzureEventHubTransportType;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

@@ -60,11 +58,8 @@ import java.util.Map;
public class AzureEventHubRecordSink extends AbstractControllerService implements RecordSinkService, AzureEventHubComponent {

static final AllowableValue AZURE_ENDPOINT = new AllowableValue(".servicebus.windows.net","Azure", "Default Service Bus Endpoint");

static final AllowableValue AZURE_CHINA_ENDPOINT = new AllowableValue(".servicebus.chinacloudapi.cn", "Azure China", "China Service Bus Endpoint");

static final AllowableValue AZURE_GERMANY_ENDPOINT = new AllowableValue(".servicebus.cloudapi.de", "Azure Germany", "Germany Service Bus Endpoint");

static final AllowableValue AZURE_US_GOV_ENDPOINT = new AllowableValue(".servicebus.usgovcloudapi.net", "Azure US Government", "United States Government Endpoint");

static final PropertyDescriptor SERVICE_BUS_ENDPOINT = new PropertyDescriptor.Builder()

@@ -78,7 +73,7 @@ public class AzureEventHubRecordSink extends AbstractControllerService implement
AZURE_GERMANY_ENDPOINT,
AZURE_US_GOV_ENDPOINT
)
.defaultValue(AZURE_ENDPOINT.getValue())
.defaultValue(AZURE_ENDPOINT)
.required(true)
.build();


@@ -102,7 +97,7 @@ public class AzureEventHubRecordSink extends AbstractControllerService implement
.addValidator(StandardValidators.NON_BLANK_VALIDATOR)
.allowableValues(AzureAuthenticationStrategy.class)
.required(true)
.defaultValue(AzureAuthenticationStrategy.DEFAULT_AZURE_CREDENTIAL.getValue())
.defaultValue(AzureAuthenticationStrategy.DEFAULT_AZURE_CREDENTIAL)
.build();

static final PropertyDescriptor SHARED_ACCESS_POLICY = new PropertyDescriptor.Builder()

@@ -110,7 +105,7 @@ public class AzureEventHubRecordSink extends AbstractControllerService implement
.description("The name of the shared access policy. This policy must have Send claims")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.required(false)
.dependsOn(AUTHENTICATION_STRATEGY, AzureAuthenticationStrategy.SHARED_ACCESS_KEY.getValue())
.dependsOn(AUTHENTICATION_STRATEGY, AzureAuthenticationStrategy.SHARED_ACCESS_KEY)
.build();

static final PropertyDescriptor SHARED_ACCESS_POLICY_KEY = new PropertyDescriptor.Builder()

@@ -119,7 +114,7 @@ public class AzureEventHubRecordSink extends AbstractControllerService implement
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.sensitive(true)
.required(false)
.dependsOn(AUTHENTICATION_STRATEGY, AzureAuthenticationStrategy.SHARED_ACCESS_KEY.getValue())
.dependsOn(AUTHENTICATION_STRATEGY, AzureAuthenticationStrategy.SHARED_ACCESS_KEY)
.build();

static final PropertyDescriptor PARTITION_KEY = new PropertyDescriptor.Builder()

@@ -130,18 +125,16 @@ public class AzureEventHubRecordSink extends AbstractControllerService implement
.required(false)
.build();

private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = Collections.unmodifiableList(
Arrays.asList(
SERVICE_BUS_ENDPOINT,
EVENT_HUB_NAMESPACE,
EVENT_HUB_NAME,
TRANSPORT_TYPE,
RECORD_WRITER_FACTORY,
AUTHENTICATION_STRATEGY,
SHARED_ACCESS_POLICY,
SHARED_ACCESS_POLICY_KEY,
PARTITION_KEY
)
private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = List.of(
SERVICE_BUS_ENDPOINT,
EVENT_HUB_NAMESPACE,
EVENT_HUB_NAME,
TRANSPORT_TYPE,
RECORD_WRITER_FACTORY,
AUTHENTICATION_STRATEGY,
SHARED_ACCESS_POLICY,
SHARED_ACCESS_POLICY_KEY,
PARTITION_KEY
);

private volatile ConfigurationContext context;

@@ -184,9 +177,8 @@ public class AzureEventHubRecordSink extends AbstractControllerService implement
final String eventHubName = context.getProperty(EVENT_HUB_NAME).evaluateAttributeExpressions().getValue();
final String policyName = context.getProperty(SHARED_ACCESS_POLICY).getValue();
final String policyKey = context.getProperty(SHARED_ACCESS_POLICY_KEY).getValue();
final String authenticationStrategy = context.getProperty(AUTHENTICATION_STRATEGY).getValue();
final AzureAuthenticationStrategy azureAuthenticationStrategy = AzureAuthenticationStrategy.valueOf(authenticationStrategy);
final AmqpTransportType transportType = AmqpTransportType.fromString(context.getProperty(TRANSPORT_TYPE).getValue());
final AzureAuthenticationStrategy azureAuthenticationStrategy = context.getProperty(AUTHENTICATION_STRATEGY).asDescribedValue(AzureAuthenticationStrategy.class);
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE).asDescribedValue(AzureEventHubTransportType.class).asAmqpTransportType();
client = createEventHubClient(namespace, serviceBusEndpoint, eventHubName, policyName, policyKey, azureAuthenticationStrategy, transportType);
}
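
The typed builder overloads recur throughout this file: allowableValues accepts the DescribedValue enum class, while defaultValue and dependsOn accept the constants directly. A condensed sketch of the pattern, reusing names from the hunks above:

// Condensed illustration of the typed-overload pattern; not the committed declarations.
static final PropertyDescriptor AUTHENTICATION_STRATEGY = new PropertyDescriptor.Builder()
    .name("Authentication Strategy")
    .allowableValues(AzureAuthenticationStrategy.class) // enum implementing DescribedValue
    .defaultValue(AzureAuthenticationStrategy.DEFAULT_AZURE_CREDENTIAL) // typed default, no .getValue()
    .required(true)
    .build();

static final PropertyDescriptor SHARED_ACCESS_POLICY = new PropertyDescriptor.Builder()
    .name("Shared Access Policy")
    .dependsOn(AUTHENTICATION_STRATEGY, AzureAuthenticationStrategy.SHARED_ACCESS_KEY) // typed dependency
    .required(false)
    .build();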

@@ -34,7 +34,6 @@ import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;

@@ -85,7 +84,7 @@ public class ADLSCredentialsControllerService extends AbstractControllerService

public static final PropertyDescriptor PROXY_CONFIGURATION_SERVICE = AzureStorageUtils.PROXY_CONFIGURATION_SERVICE;

private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
private static final List<PropertyDescriptor> PROPERTIES = List.of(
ACCOUNT_NAME,
ENDPOINT_SUFFIX,
AzureStorageUtils.ACCOUNT_KEY,

@@ -96,7 +95,7 @@ public class ADLSCredentialsControllerService extends AbstractControllerService
SERVICE_PRINCIPAL_CLIENT_ID,
SERVICE_PRINCIPAL_CLIENT_SECRET,
PROXY_CONFIGURATION_SERVICE
));
);

private ConfigurationContext context;

@@ -63,11 +63,11 @@ public class AzureStorageCredentialsControllerService_v12 extends AbstractContro
.displayName("Credentials Type")
.description("Credentials type to be used for authenticating to Azure")
.required(true)
.allowableValues(AzureStorageCredentialsType.ACCOUNT_KEY.getAllowableValue(),
AzureStorageCredentialsType.SAS_TOKEN.getAllowableValue(),
AzureStorageCredentialsType.MANAGED_IDENTITY.getAllowableValue(),
AzureStorageCredentialsType.SERVICE_PRINCIPAL.getAllowableValue())
.defaultValue(AzureStorageCredentialsType.SAS_TOKEN.name())
.allowableValues(new AzureStorageCredentialsType[]{
AzureStorageCredentialsType.ACCOUNT_KEY, AzureStorageCredentialsType.SAS_TOKEN,
AzureStorageCredentialsType.MANAGED_IDENTITY, AzureStorageCredentialsType.SERVICE_PRINCIPAL
})
.defaultValue(AzureStorageCredentialsType.SAS_TOKEN)
.build();

public static final PropertyDescriptor ACCOUNT_KEY = new PropertyDescriptor.Builder()

@@ -76,7 +76,7 @@ public class AzureStorageCredentialsControllerService_v12 extends AbstractContro
.description(AzureStorageUtils.ACCOUNT_KEY_BASE_DESCRIPTION)
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.ACCOUNT_KEY.getAllowableValue())
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.ACCOUNT_KEY)
.build();

public static final PropertyDescriptor SAS_TOKEN = new PropertyDescriptor.Builder()

@@ -84,35 +84,35 @@ public class AzureStorageCredentialsControllerService_v12 extends AbstractContro
.description(AzureStorageUtils.SAS_TOKEN_BASE_DESCRIPTION)
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SAS_TOKEN.getAllowableValue())
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SAS_TOKEN)
.build();

public static final PropertyDescriptor MANAGED_IDENTITY_CLIENT_ID = new PropertyDescriptor.Builder()
.fromPropertyDescriptor(AzureStorageUtils.MANAGED_IDENTITY_CLIENT_ID)
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.MANAGED_IDENTITY.getAllowableValue())
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.MANAGED_IDENTITY)
.build();

public static final PropertyDescriptor SERVICE_PRINCIPAL_TENANT_ID = new PropertyDescriptor.Builder()
.fromPropertyDescriptor(AzureStorageUtils.SERVICE_PRINCIPAL_TENANT_ID)
.required(true)
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SERVICE_PRINCIPAL.getAllowableValue())
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SERVICE_PRINCIPAL)
.build();

public static final PropertyDescriptor SERVICE_PRINCIPAL_CLIENT_ID = new PropertyDescriptor.Builder()
.fromPropertyDescriptor(AzureStorageUtils.SERVICE_PRINCIPAL_CLIENT_ID)
.required(true)
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SERVICE_PRINCIPAL.getAllowableValue())
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SERVICE_PRINCIPAL)
.build();

public static final PropertyDescriptor SERVICE_PRINCIPAL_CLIENT_SECRET = new PropertyDescriptor.Builder()
.fromPropertyDescriptor(AzureStorageUtils.SERVICE_PRINCIPAL_CLIENT_SECRET)
.required(true)
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SERVICE_PRINCIPAL.getAllowableValue())
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SERVICE_PRINCIPAL)
.build();

public static final PropertyDescriptor PROXY_CONFIGURATION_SERVICE = new PropertyDescriptor.Builder()
.fromPropertyDescriptor(AzureStorageUtils.PROXY_CONFIGURATION_SERVICE)
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SERVICE_PRINCIPAL.getAllowableValue(), AzureStorageCredentialsType.MANAGED_IDENTITY.getAllowableValue())
.dependsOn(CREDENTIALS_TYPE, AzureStorageCredentialsType.SERVICE_PRINCIPAL, AzureStorageCredentialsType.MANAGED_IDENTITY)
.build();

private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(

@@ -144,7 +144,7 @@ public class AzureStorageCredentialsControllerService_v12 extends AbstractContro
public AzureStorageCredentialsDetails_v12 getCredentialsDetails(Map<String, String> attributes) {
String accountName = context.getProperty(ACCOUNT_NAME).getValue();
String endpointSuffix = context.getProperty(ENDPOINT_SUFFIX).getValue();
AzureStorageCredentialsType credentialsType = AzureStorageCredentialsType.valueOf(context.getProperty(CREDENTIALS_TYPE).getValue());
AzureStorageCredentialsType credentialsType = context.getProperty(CREDENTIALS_TYPE).asDescribedValue(AzureStorageCredentialsType.class);
ProxyOptions proxyOptions = AzureStorageUtils.getProxyOptions(context);

switch (credentialsType) {
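
On the test side, the same constants can be handed straight to the runner; a hedged usage fragment, assuming a surrounding test method that declares Exception and a processor class under test:

// Illustrative fragment; TestProcessor is a hypothetical stand-in for the processor under test.
final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
final AzureStorageCredentialsControllerService_v12 service = new AzureStorageCredentialsControllerService_v12();
runner.addControllerService("credentials-service", service);
runner.setProperty(service, AzureStorageCredentialsControllerService_v12.CREDENTIALS_TYPE, AzureStorageCredentialsType.SAS_TOKEN); // typed overload, no .name()
runner.setProperty(service, AzureStorageCredentialsControllerService_v12.SAS_TOKEN, "<sas-token>");
runner.enableControllerService(service);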

@@ -31,15 +31,14 @@ public interface AzureEventHubComponent {
.displayName("Transport Type")
.description("Advanced Message Queuing Protocol Transport Type for communication with Azure Event Hubs")
.allowableValues(AzureEventHubTransportType.class)
.defaultValue(AzureEventHubTransportType.AMQP.getValue())
.defaultValue(AzureEventHubTransportType.AMQP)
.required(true)
.addValidator(StandardValidators.NON_BLANK_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.build();
ProxySpec[] PROXY_SPECS = {ProxySpec.HTTP, ProxySpec.HTTP_AUTH};
PropertyDescriptor PROXY_CONFIGURATION_SERVICE
= new PropertyDescriptor.Builder()
PropertyDescriptor PROXY_CONFIGURATION_SERVICE = new PropertyDescriptor.Builder()
.fromPropertyDescriptor(ProxyConfiguration.createProxyConfigPropertyDescriptor(false, PROXY_SPECS))
.dependsOn(TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS.getValue())
.dependsOn(TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS)
.build();
}

@@ -16,23 +16,24 @@
*/
package org.apache.nifi.shared.azure.eventhubs;

import com.azure.core.amqp.AmqpTransportType;
import org.apache.nifi.components.DescribedValue;

/**
* Azure Event Hubs Transport Type allowable values based on AmqpTransportType values from the Azure SDK
*/
public enum AzureEventHubTransportType implements DescribedValue {
AMQP("Amqp", "AMQP over TCP on ports 5671 and 5672"),

AMQP_WEB_SOCKETS("AmqpWebSockets", "AMQP over HTTPS with WebSockets on port 443");
AMQP("Amqp", "AMQP over TCP on ports 5671 and 5672", AmqpTransportType.AMQP),
AMQP_WEB_SOCKETS("AmqpWebSockets", "AMQP over HTTPS with WebSockets on port 443", AmqpTransportType.AMQP_WEB_SOCKETS);

private final String value;

private final String description;
private final AmqpTransportType amqpTransportType;

AzureEventHubTransportType(final String value, final String description) {
AzureEventHubTransportType(final String value, final String description, final AmqpTransportType amqpTransportType) {
this.value = value;
this.description = description;
this.amqpTransportType = amqpTransportType;
}

@Override

@@ -49,4 +50,8 @@ public enum AzureEventHubTransportType implements DescribedValue {
public String getDescription() {
return description;
}

public AmqpTransportType asAmqpTransportType() {
return this.amqpTransportType;
}
}
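
With the SDK mapping embedded in the enum, callers translate the property value into the Azure type in one chain, avoiding the string round-trip through AmqpTransportType.fromString; in isolation:

// Illustrative fragment; context is a ConfigurationContext.
final AmqpTransportType transportType = context.getProperty(TRANSPORT_TYPE)
    .asDescribedValue(AzureEventHubTransportType.class)
    .asAmqpTransportType();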

@@ -59,7 +59,6 @@ public abstract class ITAbstractAzureCosmosDBDocument {
protected static CosmosContainer container;

static {

CONFIG = new Properties();
assertDoesNotThrow(() -> {
final FileInputStream fis = new FileInputStream(CREDENTIALS_FILE);

@@ -17,7 +17,6 @@
package org.apache.nifi.processors.azure.cosmos.document;

import com.azure.cosmos.models.CosmosQueryRequestOptions;
import com.azure.cosmos.util.CosmosPagedIterable;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.nifi.processor.Processor;
import org.apache.nifi.reporting.InitializationException;

@@ -53,25 +52,21 @@ public class ITPutAzureCosmosDBRecord extends ITAbstractAzureCosmosDBDocument {

@AfterEach
public void cleanupTestCase() {
try{
try {
clearTestData();
closeClient();
} catch(Exception e) {
} catch (Exception ignored) {

}
}

private List<JsonNode> getDataFromTestDB() {
logger.info("getDataFromTestDB for test result validation");
CosmosQueryRequestOptions queryOptions = new CosmosQueryRequestOptions();
List<JsonNode> results = new ArrayList<>();

CosmosPagedIterable<JsonNode> response = container.queryItems(
"select * from c order by c._ts", queryOptions, JsonNode.class );

response.forEach(data -> {
results.add(data);
});
return results;
return container
.queryItems("select * from c order by c._ts", queryOptions, JsonNode.class)
.stream().toList();
}

private MockRecordParser recordReader;

@@ -99,32 +94,36 @@ public class ITPutAzureCosmosDBRecord extends ITAbstractAzureCosmosDBDocument {
final RecordSchema personSchema = new SimpleRecordSchema(personFields);
recordReader.addSchemaField("person", RecordFieldType.RECORD);

recordReader.addRecord("1", "A", new MapRecord(personSchema, new HashMap<String,Object>() {
recordReader.addRecord("1", "A", new MapRecord(personSchema, new HashMap<String, Object>() {
private static final long serialVersionUID = -3185956498135742190L;

{
put("name", "John Doe");
put("age", 48);
put("sport", "Soccer");
}
}));
recordReader.addRecord("2", "B", new MapRecord(personSchema, new HashMap<String,Object>() {
recordReader.addRecord("2", "B", new MapRecord(personSchema, new HashMap<String, Object>() {
private static final long serialVersionUID = 1L;

{
put("name", "Jane Doe");
put("age", 47);
put("sport", "Tennis");
}
}));
recordReader.addRecord("3", "A", new MapRecord(personSchema, new HashMap<String,Object>() {
recordReader.addRecord("3", "A", new MapRecord(personSchema, new HashMap<String, Object>() {
private static final long serialVersionUID = -1329194249439570573L;

{
put("name", "Sally Doe");
put("age", 47);
put("sport", "Curling");
}
}));
recordReader.addRecord("4", "C", new MapRecord(personSchema, new HashMap<String,Object>() {
recordReader.addRecord("4", "C", new MapRecord(personSchema, new HashMap<String, Object>() {
private static final long serialVersionUID = -1329194249439570574L;

{
put("name", "Jimmy Doe");
put("age", 14);

@@ -148,10 +147,10 @@ public class ITPutAzureCosmosDBRecord extends ITAbstractAzureCosmosDBDocument {
recordReader.addSchemaField("sport", RecordFieldType.STRING);

recordReader.addRecord("1", "A", "John Doe", 48, "Soccer");
recordReader.addRecord("2", "B","Jane Doe", 47, "Tennis");
recordReader.addRecord("2", "B", "Jane Doe", 47, "Tennis");
recordReader.addRecord("3", "B", "Sally Doe", 47, "Curling");
recordReader.addRecord("4", "A", "Jimmy Doe", 14, null);
recordReader.addRecord("5", "C","Pizza Doe", 14, null);
recordReader.addRecord("5", "C", "Pizza Doe", 14, null);

runner.enqueue("");
runner.run();
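
The rewritten getDataFromTestDB relies on CosmosPagedIterable.stream() and Stream.toList() instead of a forEach into a pre-built list; the same pattern in isolation, as a sketch:

// Illustrative sketch of the accumulation-to-stream rewrite; container is a CosmosContainer.
private List<JsonNode> queryAll(final CosmosContainer container) {
    return container
        .queryItems("select * from c order by c._ts", new CosmosQueryRequestOptions(), JsonNode.class)
        .stream()
        .toList(); // unmodifiable List, Java 16+
}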

@@ -23,8 +23,6 @@ import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.services.azure.cosmos.document.AzureCosmosDBClientService;
import org.apache.nifi.util.TestRunner;

import java.util.Random;

import static org.mockito.Mockito.mock;

public class MockTestBase {

@@ -57,11 +55,6 @@ public class MockTestBase {
}
}

private static Random random = new Random();
public static int getRandomInt(int min, int max){
return random.nextInt((max-min)+1) + min;
}

private class MockConnectionService extends AzureCosmosDBClientService {
@Override
protected void createCosmosClient(final String uri, final String accessKey, final ConsistencyLevel clevel){
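
The removed helper produced values in the closed range [min, max]; on Java 17+ the bounded Random overload expresses the same thing directly:

// Equivalent to the removed helper (illustrative): bound is exclusive, hence max + 1.
int value = new java.util.Random().nextInt(min, max + 1);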

@@ -248,7 +248,7 @@ public class PutAzureCosmosDBRecordTest extends MockTestBase {
List<Map<String, Object>> backendData = processor.getTestResults();
assertEquals(1, backendData.size());
//validate array data
final Map<?, ?> arrayTestResult = backendData.get(0);
final Map<?, ?> arrayTestResult = backendData.getFirst();
Object[] check = (Object []) arrayTestResult.get("arrayTest");
assertArrayEquals(new Object[]{"a", "b", "c"}, check);
}

@@ -265,7 +265,7 @@ class MockPutAzureCosmosDBRecord extends PutAzureCosmosDBRecord {
private final List<Map<String, Object>> mockBackend = new ArrayList<>();

@Override
protected void createCosmosClient(final String uri, final String accessKey, final ConsistencyLevel clevel) {
protected void createCosmosClient(final String uri, final String accessKey, final ConsistencyLevel consistencyLevel) {
this.setCosmosClient(mockClient);
}
@Override
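
get(0)-to-getFirst() swaps like the one above recur through the remaining test changes; getFirst() comes from Java 21's SequencedCollection and behaves identically on any non-empty List:

// Illustrative only: SequencedCollection accessors versus index-based access.
final List<String> items = List.of("a", "b", "c");
assert items.getFirst().equals(items.get(0));
assert items.getLast().equals(items.get(items.size() - 1));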

@@ -89,7 +89,7 @@ public class QueryAzureDataExplorerTest {

runner.assertAllFlowFilesTransferred(QueryAzureDataExplorer.FAILURE);

final MockFlowFile flowFile = runner.getFlowFilesForRelationship(QueryAzureDataExplorer.FAILURE).iterator().next();
final MockFlowFile flowFile = runner.getFlowFilesForRelationship(QueryAzureDataExplorer.FAILURE).getFirst();
flowFile.assertAttributeEquals(QueryAzureDataExplorer.QUERY_ERROR_MESSAGE, ERROR_MESSAGE);
flowFile.assertAttributeEquals(QueryAzureDataExplorer.QUERY_EXECUTED, QUERY);
}

@@ -110,7 +110,7 @@ public class QueryAzureDataExplorerTest {

runner.assertAllFlowFilesTransferred(QueryAzureDataExplorer.SUCCESS);

final MockFlowFile flowFile = runner.getFlowFilesForRelationship(QueryAzureDataExplorer.SUCCESS).iterator().next();
final MockFlowFile flowFile = runner.getFlowFilesForRelationship(QueryAzureDataExplorer.SUCCESS).getFirst();
flowFile.assertAttributeEquals(QueryAzureDataExplorer.QUERY_EXECUTED, QUERY);
flowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), QueryAzureDataExplorer.APPLICATION_JSON);
flowFile.assertContentEquals(EMPTY_ARRAY);

@@ -85,7 +85,7 @@ public class GetAzureEventHubTest {
testRunner.assertValid();
testRunner.setProperty(GetAzureEventHub.RECEIVER_FETCH_TIMEOUT, "10000");
testRunner.assertValid();
testRunner.setProperty(GetAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS.getValue());
testRunner.setProperty(GetAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS);
testRunner.assertValid();
configureProxyControllerService();
testRunner.assertValid();

@@ -132,7 +132,7 @@ public class GetAzureEventHubTest {
testRunner.run();
testRunner.assertAllFlowFilesTransferred(GetAzureEventHub.REL_SUCCESS, 1);

final MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(GetAzureEventHub.REL_SUCCESS).get(0);
final MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(GetAzureEventHub.REL_SUCCESS).getFirst();
flowFile.assertContentEquals(CONTENT);
flowFile.assertAttributeEquals("eventhub.enqueued.timestamp", ENQUEUED_TIME.toString());
flowFile.assertAttributeEquals("eventhub.offset", Long.toString(OFFSET));

@@ -79,7 +79,7 @@ public class PutAzureEventHubTest {
testRunner.assertNotValid();
testRunner.setProperty(PutAzureEventHub.POLICY_PRIMARY_KEY, POLICY_KEY);
testRunner.assertValid();
testRunner.setProperty(PutAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS.getValue());
testRunner.setProperty(PutAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS);
testRunner.assertValid();
configureProxyControllerService();
testRunner.assertValid();

@@ -153,7 +153,7 @@ public class TestConsumeAzureEventHub {
testRunner.assertNotValid();
testRunner.setProperty(ConsumeAzureEventHub.USE_MANAGED_IDENTITY,"true");
testRunner.assertValid();
testRunner.setProperty(ConsumeAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS.getValue());
testRunner.setProperty(ConsumeAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS);
testRunner.assertValid();
configureProxyControllerService();
testRunner.assertValid();

@@ -199,7 +199,7 @@ public class TestConsumeAzureEventHub {
testRunner.setProperty(ConsumeAzureEventHub.STORAGE_ACCOUNT_NAME, STORAGE_ACCOUNT_NAME);
testRunner.setProperty(ConsumeAzureEventHub.STORAGE_SAS_TOKEN, STORAGE_TOKEN);
testRunner.assertValid();
testRunner.setProperty(ConsumeAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS.getValue());
testRunner.setProperty(ConsumeAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS);
testRunner.assertValid();
configureProxyControllerService();
testRunner.assertValid();

@@ -217,7 +217,7 @@ public class TestConsumeAzureEventHub {
testRunner.setProperty(ConsumeAzureEventHub.STORAGE_ACCOUNT_NAME, STORAGE_ACCOUNT_NAME);
testRunner.setProperty(ConsumeAzureEventHub.STORAGE_ACCOUNT_KEY, STORAGE_ACCOUNT_KEY);
testRunner.assertValid();
testRunner.setProperty(ConsumeAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS.getValue());
testRunner.setProperty(ConsumeAzureEventHub.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS);
testRunner.assertValid();
configureProxyControllerService();
testRunner.assertValid();

@@ -234,13 +234,13 @@ public class TestConsumeAzureEventHub {

final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_SUCCESS);
assertEquals(1, flowFiles.size());
final MockFlowFile flowFile = flowFiles.get(0);
final MockFlowFile flowFile = flowFiles.getFirst();
flowFile.assertContentEquals(FIRST_CONTENT);
assertEventHubAttributesFound(flowFile);

final List<ProvenanceEventRecord> provenanceEvents = testRunner.getProvenanceEvents();
assertEquals(1, provenanceEvents.size());
final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.get(0);
final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.getFirst();
assertEquals(ProvenanceEventType.RECEIVE, provenanceEvent1.getEventType());
assertEquals(EXPECTED_TRANSIT_URI, provenanceEvent1.getTransitUri());
}

@@ -280,13 +280,13 @@ public class TestConsumeAzureEventHub {

final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_SUCCESS);
assertEquals(1, flowFiles.size());
final MockFlowFile ff1 = flowFiles.get(0);
final MockFlowFile ff1 = flowFiles.getFirst();
ff1.assertContentEquals(FIRST_CONTENT + SECOND_CONTENT);
assertEventHubAttributesFound(ff1);

final List<ProvenanceEventRecord> provenanceEvents = testRunner.getProvenanceEvents();
assertEquals(1, provenanceEvents.size());
final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.get(0);
final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.getFirst();
assertEquals(ProvenanceEventType.RECEIVE, provenanceEvent1.getEventType());
assertEquals(EXPECTED_TRANSIT_URI, provenanceEvent1.getTransitUri());
}

@@ -306,20 +306,20 @@ public class TestConsumeAzureEventHub {

final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_SUCCESS);
assertEquals(1, flowFiles.size());
final MockFlowFile ff1 = flowFiles.get(0);
final MockFlowFile ff1 = flowFiles.getFirst();
ff1.assertContentEquals(FIRST_CONTENT + SECOND_CONTENT + FOURTH_CONTENT);
assertEventHubAttributesFound(ff1);

final List<MockFlowFile> failedFFs = testRunner.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_PARSE_FAILURE);
assertEquals(1, failedFFs.size());
final MockFlowFile failed1 = failedFFs.get(0);
final MockFlowFile failed1 = failedFFs.getFirst();
failed1.assertContentEquals(THIRD_CONTENT);
assertEventHubAttributesFound(failed1);

final List<ProvenanceEventRecord> provenanceEvents = testRunner.getProvenanceEvents();
assertEquals(2, provenanceEvents.size());

final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.get(0);
final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.getFirst();
assertEquals(ProvenanceEventType.RECEIVE, provenanceEvent1.getEventType());
assertEquals(EXPECTED_TRANSIT_URI, provenanceEvent1.getTransitUri());

@@ -346,14 +346,14 @@ public class TestConsumeAzureEventHub {

final List<MockFlowFile> failedFFs = testRunner.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_PARSE_FAILURE);
assertEquals(1, failedFFs.size());
final MockFlowFile failed1 = failedFFs.get(0);
final MockFlowFile failed1 = failedFFs.getFirst();
failed1.assertContentEquals(FIRST_CONTENT);
assertEventHubAttributesFound(failed1);

final List<ProvenanceEventRecord> provenanceEvents = testRunner.getProvenanceEvents();
assertEquals(1, provenanceEvents.size());

final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.get(0);
final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.getFirst();
assertEquals(ProvenanceEventType.RECEIVE, provenanceEvent1.getEventType());
assertEquals(EXPECTED_TRANSIT_URI, provenanceEvent1.getTransitUri());
}

@@ -373,20 +373,20 @@ public class TestConsumeAzureEventHub {

final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_SUCCESS);
assertEquals(1, flowFiles.size());
final MockFlowFile ff1 = flowFiles.get(0);
final MockFlowFile ff1 = flowFiles.getFirst();
ff1.assertContentEquals(FIRST_CONTENT + THIRD_CONTENT + FOURTH_CONTENT);
assertEventHubAttributesFound(ff1);

final List<MockFlowFile> failedFFs = testRunner.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_PARSE_FAILURE);
assertEquals(1, failedFFs.size());
final MockFlowFile failed1 = failedFFs.get(0);
final MockFlowFile failed1 = failedFFs.getFirst();
failed1.assertContentEquals(SECOND_CONTENT);
assertEventHubAttributesFound(failed1);

final List<ProvenanceEventRecord> provenanceEvents = testRunner.getProvenanceEvents();
assertEquals(2, provenanceEvents.size());

final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.get(0);
final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.getFirst();
assertEquals(ProvenanceEventType.RECEIVE, provenanceEvent1.getEventType());
assertEquals(EXPECTED_TRANSIT_URI, provenanceEvent1.getTransitUri());

@@ -80,14 +80,13 @@ public abstract class AbstractAzureBlobStorage_v12IT extends AbstractAzureStorag

@Override
protected void setUpCredentials() throws Exception {
String serviceId = "credentials-service";
AzureStorageCredentialsService_v12 service = new AzureStorageCredentialsControllerService_v12();
runner.addControllerService(SERVICE_ID, service);
runner.setProperty(service, AzureStorageCredentialsControllerService_v12.ACCOUNT_NAME, getAccountName());
if (getEndpointSuffix() != null) {
runner.setProperty(service, AzureStorageCredentialsControllerService_v12.ENDPOINT_SUFFIX, getEndpointSuffix());
}
runner.setProperty(service, AzureStorageCredentialsControllerService_v12.CREDENTIALS_TYPE, AzureStorageCredentialsType.ACCOUNT_KEY.getAllowableValue());
runner.setProperty(service, AzureStorageCredentialsControllerService_v12.CREDENTIALS_TYPE, AzureStorageCredentialsType.ACCOUNT_KEY);
runner.setProperty(service, AzureStorageCredentialsControllerService_v12.ACCOUNT_KEY, getAccountKey());
runner.enableControllerService(service);

@@ -179,7 +178,7 @@ public abstract class AbstractAzureBlobStorage_v12IT extends AbstractAzureStorag
flowFile.assertAttributeEquals(BlobAttributes.ATTR_NAME_PRIMARY_URI,
String.format("https://%s.blob.core.windows.net/%s/%s", getAccountName(), containerName, URLEncoder.encode(
blobName,
StandardCharsets.US_ASCII.name()
StandardCharsets.US_ASCII
).replace("+", "%20").replace("%2F", "/"))
);
}
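
The charset change above moves to the URLEncoder overload introduced in Java 10, which takes a Charset and declares no checked exception, unlike encode(String, String):

// Illustrative: no UnsupportedEncodingException handling needed with the Charset overload.
String encoded = URLEncoder.encode("blob name/path", StandardCharsets.US_ASCII);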

@@ -94,7 +94,7 @@ public class ITCopyAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
private void assertFlowFile(String containerName, String blobName, byte[] blobData) throws Exception {
runner.assertAllFlowFilesTransferred(CopyAzureBlobStorage_v12.REL_SUCCESS, 1);

MockFlowFile flowFile = runner.getFlowFilesForRelationship(CopyAzureBlobStorage_v12.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(CopyAzureBlobStorage_v12.REL_SUCCESS).getFirst();

assertFlowFileCommonBlobAttributes(flowFile, containerName, blobName);
if (blobData != null) {

@@ -172,7 +172,7 @@ public class ITDeleteAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT
private void assertFlowFile(Relationship relationship) throws Exception {
runner.assertAllFlowFilesTransferred(relationship, 1);

MockFlowFile flowFile = runner.getFlowFilesForRelationship(relationship).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(relationship).getFirst();

flowFile.assertContentEquals(EMPTY_CONTENT);
}

@@ -424,7 +424,7 @@ public class ITDeleteAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT
startRunner(inputFlowFileContent, Collections.emptyMap());

// THEN
DataLakeStorageException e = (DataLakeStorageException) runner.getLogger().getErrorMessages().get(0).getThrowable();
DataLakeStorageException e = (DataLakeStorageException) runner.getLogger().getErrorMessages().getFirst().getThrowable();
assertEquals(expectedErrorCode, e.getStatusCode());

assertFailure(expectedFlowFileContent);

@@ -439,7 +439,7 @@ public class ITDeleteAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT
startRunner(inputFlowFileContent, attributes);

// THEN
Throwable exception = runner.getLogger().getErrorMessages().get(0).getThrowable();
Throwable exception = runner.getLogger().getErrorMessages().getFirst().getThrowable();
assertEquals(ProcessException.class, exception.getClass());

assertFailure(expectedFlowFileContent);

@@ -459,7 +459,7 @@ public class ITDeleteAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT
}

private void setRunnerProperties(String fileSystem, String directory, String filename) {
runner.setProperty(DeleteAzureDataLakeStorage.FILESYSTEM_OBJECT_TYPE, filename != null ? FS_TYPE_FILE.getValue() : FS_TYPE_DIRECTORY.getValue());
runner.setProperty(DeleteAzureDataLakeStorage.FILESYSTEM_OBJECT_TYPE, filename != null ? FS_TYPE_FILE : FS_TYPE_DIRECTORY);
runner.setProperty(DeleteAzureDataLakeStorage.FILESYSTEM, fileSystem);
runner.setProperty(DeleteAzureDataLakeStorage.DIRECTORY, directory);
if (filename != null) {

@@ -475,13 +475,13 @@ public class ITDeleteAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT

private void assertSuccess(String directory, String filename, String expectedFlowFileContent, int expectedNumberOfProvenanceEvents, ProvenanceEventType expectedEventType) {
runner.assertAllFlowFilesTransferred(DeleteAzureDataLakeStorage.REL_SUCCESS, 1);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(DeleteAzureDataLakeStorage.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(DeleteAzureDataLakeStorage.REL_SUCCESS).getFirst();
flowFile.assertContentEquals(expectedFlowFileContent);

int actualNumberOfProvenanceEvents = runner.getProvenanceEvents().size();
assertEquals(expectedNumberOfProvenanceEvents, actualNumberOfProvenanceEvents);

ProvenanceEventType actualEventType = runner.getProvenanceEvents().get(0).getEventType();
ProvenanceEventType actualEventType = runner.getProvenanceEvents().getFirst().getEventType();
assertEquals(expectedEventType, actualEventType);

if (filename != null) {

@@ -493,7 +493,7 @@ public class ITDeleteAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT

private void assertFailure(String expectedFlowFileContent) {
runner.assertAllFlowFilesTransferred(DeleteAzureDataLakeStorage.REL_FAILURE, 1);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(DeleteAzureDataLakeStorage.REL_FAILURE).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(DeleteAzureDataLakeStorage.REL_FAILURE).getFirst();
flowFile.assertContentEquals(expectedFlowFileContent);
}

@@ -58,7 +58,7 @@ public class ITFetchAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT

@Test
public void testFetchBlobWithCSE() throws Exception {
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL.name());
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL);
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_ID, KEY_ID_VALUE);
runner.setProperty(ClientSideEncryptionSupport.CSE_LOCAL_KEY, KEY_128B_VALUE);
uploadBlobWithCSE(BLOB_NAME, BLOB_DATA, KEY_128B_VALUE, KEY_ID_VALUE, KeyWrapAlgorithm.A128KW.toString());

@@ -205,7 +205,7 @@ public class ITFetchAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT
private void assertFlowFile(String blobName, byte[] blobData, Integer originalLength) throws Exception {
runner.assertAllFlowFilesTransferred(FetchAzureBlobStorage_v12.REL_SUCCESS, 1);

MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureBlobStorage_v12.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureBlobStorage_v12.REL_SUCCESS).getFirst();

assertFlowFileCommonBlobAttributes(flowFile, getContainerName(), blobName);
if(originalLength != null) {

@@ -226,7 +226,7 @@ public class ITFetchAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT
private void assertFailure() throws Exception {
runner.assertAllFlowFilesTransferred(FetchAzureBlobStorage_v12.REL_FAILURE, 1);

MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureBlobStorage_v12.REL_FAILURE).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureBlobStorage_v12.REL_FAILURE).getFirst();
flowFile.assertContentEquals(EMPTY_CONTENT);
}
}

@@ -236,7 +236,6 @@ public class ITFetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT
Random random = new Random();
byte[] fileContentBytes = new byte[120_000_000];
random.nextBytes(fileContentBytes);
String fileContent = new String(fileContentBytes);
String inputFlowFileContent = "InputFlowFileContent";

createDirectoryAndUploadFile(directory, filename, TEST_FILE_CONTENT);

@@ -446,7 +445,7 @@ public class ITFetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT
startRunner(inputFlowFileContent, Collections.emptyMap());

// THEN
DataLakeStorageException e = (DataLakeStorageException)runner.getLogger().getErrorMessages().get(0).getThrowable();
DataLakeStorageException e = (DataLakeStorageException)runner.getLogger().getErrorMessages().getFirst().getThrowable();
assertEquals(416, e.getStatusCode());
}

@@ -489,7 +488,7 @@ public class ITFetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT
startRunner(inputFlowFileContent, attributes);

// THEN
DataLakeStorageException e = (DataLakeStorageException)runner.getLogger().getErrorMessages().get(0).getThrowable();
DataLakeStorageException e = (DataLakeStorageException)runner.getLogger().getErrorMessages().getFirst().getThrowable();
assertEquals(expectedErrorCode, e.getStatusCode());

assertFailure(expectedFlowFileContent);

@@ -508,7 +507,7 @@ public class ITFetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT
startRunner(inputFlowFileContent, attributes);

// THEN
Throwable exception = runner.getLogger().getErrorMessages().get(0).getThrowable();
Throwable exception = runner.getLogger().getErrorMessages().getFirst().getThrowable();
assertEquals(ProcessException.class, exception.getClass());

assertFailure(expectedFlowFileContent);

@@ -541,7 +540,7 @@ public class ITFetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT

private void assertSuccess(String expectedFlowFileContent, Set<ProvenanceEventType> expectedEventTypes) {
runner.assertAllFlowFilesTransferred(FetchAzureDataLakeStorage.REL_SUCCESS, 1);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureDataLakeStorage.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureDataLakeStorage.REL_SUCCESS).getFirst();
flowFile.assertContentEquals(expectedFlowFileContent);

Set<ProvenanceEventType> actualEventTypes = runner.getProvenanceEvents().stream()

@@ -552,7 +551,7 @@ public class ITFetchAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT

private void assertFailure(String expectedFlowFileContent) {
runner.assertAllFlowFilesTransferred(FetchAzureDataLakeStorage.REL_FAILURE, 1);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureDataLakeStorage.REL_FAILURE).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchAzureDataLakeStorage.REL_FAILURE).getFirst();
flowFile.assertContentEquals(expectedFlowFileContent);
}
}

@@ -130,7 +130,7 @@ public class ITListAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
runner.run();

runner.assertAllFlowFilesTransferred(ListAzureBlobStorage_v12.REL_SUCCESS, 1);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListAzureBlobStorage_v12.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListAzureBlobStorage_v12.REL_SUCCESS).getFirst();
flowFile.assertAttributeEquals("record.count", "4");
}

@@ -174,7 +174,7 @@ public class ITListAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
runProcessor();

runner.assertAllFlowFilesTransferred(ListAzureBlobStorage_v12.REL_SUCCESS, 1);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListAzureBlobStorage_v12.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListAzureBlobStorage_v12.REL_SUCCESS).getFirst();
assertFlowFileCommonBlobAttributes(flowFile, getContainerName(), "blob5");
assertFlowFileResultBlobAttributes(flowFile, "Test".length());
}

@@ -378,7 +378,7 @@ public class ITListAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT {
runner.run();

runner.assertAllFlowFilesTransferred(ListAzureDataLakeStorage.REL_SUCCESS, 1);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListAzureDataLakeStorage.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListAzureDataLakeStorage.REL_SUCCESS).getFirst();
flowFile.assertAttributeEquals("record.count", "3");
}

@@ -396,7 +396,7 @@ public class ITListAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT {
runner.run();

runner.assertAllFlowFilesTransferred(ListAzureDataLakeStorage.REL_SUCCESS, 1);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListAzureDataLakeStorage.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(ListAzureDataLakeStorage.REL_SUCCESS).getFirst();
flowFile.assertAttributeEquals("record.count", "5");
}

@@ -336,7 +336,7 @@ public class ITMoveAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT {
private MockFlowFile assertFlowFile(byte[] fileData) throws Exception {
runner.assertAllFlowFilesTransferred(MoveAzureDataLakeStorage.REL_SUCCESS, 1);

MockFlowFile flowFile = runner.getFlowFilesForRelationship(MoveAzureDataLakeStorage.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(MoveAzureDataLakeStorage.REL_SUCCESS).getFirst();

flowFile.assertContentEquals(fileData);
@ -46,6 +46,7 @@ import java.util.stream.Collectors;
|
|||
import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_ERROR_CODE;
|
||||
import static org.apache.nifi.processors.azure.storage.utils.BlobAttributes.ATTR_NAME_IGNORED;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
||||
|
@ -168,13 +169,13 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
runProcessor(BLOB_DATA);
|
||||
|
||||
MockFlowFile flowFile = assertFailure(BLOB_DATA, BlobErrorCode.BLOB_ALREADY_EXISTS);
|
||||
assertEquals(flowFile.getAttribute(ATTR_NAME_IGNORED), null);
|
||||
assertNull(flowFile.getAttribute(ATTR_NAME_IGNORED));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPutBlobToExistingBlobConflictStrategyIgnore() throws Exception {
|
||||
uploadBlob(BLOB_NAME, BLOB_DATA);
|
||||
runner.setProperty(AzureStorageUtils.CONFLICT_RESOLUTION, AzureStorageConflictResolutionStrategy.IGNORE_RESOLUTION.getValue());
|
||||
runner.setProperty(AzureStorageUtils.CONFLICT_RESOLUTION, AzureStorageConflictResolutionStrategy.IGNORE_RESOLUTION);
|
||||
|
||||
runProcessor(BLOB_DATA);
|
||||
|
||||
|
@ -185,7 +186,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
@Test
|
||||
public void testPutBlobToExistingBlobConflictStrategyReplace() throws Exception {
|
||||
uploadBlob(BLOB_NAME, BLOB_DATA);
|
||||
runner.setProperty(AzureStorageUtils.CONFLICT_RESOLUTION, AzureStorageConflictResolutionStrategy.REPLACE_RESOLUTION.getValue());
|
||||
runner.setProperty(AzureStorageUtils.CONFLICT_RESOLUTION, AzureStorageConflictResolutionStrategy.REPLACE_RESOLUTION);
|
||||
|
||||
runProcessor(BLOB_DATA);
|
||||
|
||||
|
@ -203,7 +204,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
|
||||
@Test
|
||||
public void testPutBlob64BLocalCSE() {
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL.name());
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_ID, KEY_ID_VALUE);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_LOCAL_KEY, KEY_64B_VALUE);
|
||||
runner.assertNotValid();
|
||||
|
@ -211,7 +212,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
|
||||
@Test
|
||||
public void testPutBlob128BLocalCSE() throws Exception {
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL.name());
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_ID, KEY_ID_VALUE);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_LOCAL_KEY, KEY_128B_VALUE);
|
||||
runProcessor(BLOB_DATA);
|
||||
|
@ -220,7 +221,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
|
||||
@Test
|
||||
public void testPutBlob192BLocalCSE() throws Exception {
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL.name());
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_ID, KEY_ID_VALUE);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_LOCAL_KEY, KEY_192B_VALUE);
|
||||
runProcessor(BLOB_DATA);
|
||||
|
@ -229,7 +230,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
|
||||
@Test
|
||||
public void testPutBlob256BLocalCSE() throws Exception {
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL.name());
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_ID, KEY_ID_VALUE);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_LOCAL_KEY, KEY_256B_VALUE);
|
||||
runProcessor(BLOB_DATA);
|
||||
|
@ -238,7 +239,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
|
||||
@Test
|
||||
public void testPutBlob384BLocalCSE() throws Exception {
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL.name());
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_ID, KEY_ID_VALUE);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_LOCAL_KEY, KEY_384B_VALUE);
|
||||
runProcessor(BLOB_DATA);
|
||||
|
@ -247,7 +248,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
|
||||
@Test
|
||||
public void testPutBlob512BLocalCSE() throws Exception {
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL.name());
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_TYPE, ClientSideEncryptionMethod.LOCAL);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_KEY_ID, KEY_ID_VALUE);
|
||||
runner.setProperty(ClientSideEncryptionSupport.CSE_LOCAL_KEY, KEY_512B_VALUE);
|
||||
runProcessor(BLOB_DATA);
|
||||
|
@ -264,7 +265,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
runner.setProperty(service, StandardFileResourceService.FILE_PATH, String.format("${%s}", attributeName));
|
||||
runner.enableControllerService(service);
|
||||
|
||||
runner.setProperty(ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE, ResourceTransferSource.FILE_RESOURCE_SERVICE.getValue());
|
||||
runner.setProperty(ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE, ResourceTransferSource.FILE_RESOURCE_SERVICE);
|
||||
runner.setProperty(ResourceTransferProperties.FILE_RESOURCE_SERVICE, serviceId);
|
||||
|
||||
Path tempFilePath = Files.createTempFile("ITPutAzureBlobStorage_v12_testPutBlobFromLocalFile_", "");
|
||||
|
@ -276,7 +277,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
|
|||
runProcessor(EMPTY_CONTENT, attributes);
|
||||
|
||||
runner.assertAllFlowFilesTransferred(PutAzureBlobStorage_v12.REL_SUCCESS, 1);
|
||||
MockFlowFile flowFile = runner.getFlowFilesForRelationship(PutAzureBlobStorage_v12.REL_SUCCESS).get(0);
|
||||
MockFlowFile flowFile = runner.getFlowFilesForRelationship(PutAzureBlobStorage_v12.REL_SUCCESS).getFirst();
|
||||
assertFlowFileCommonBlobAttributes(flowFile, getContainerName(), BLOB_NAME);
|
||||
assertFlowFileResultBlobAttributes(flowFile, BLOB_DATA.length);
|
||||
|
||||
|
@ -294,7 +295,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
runner.setProperty(service, StandardFileResourceService.FILE_PATH, String.format("${%s}", attributeName));
runner.enableControllerService(service);

runner.setProperty(ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE, ResourceTransferSource.FILE_RESOURCE_SERVICE.getValue());
runner.setProperty(ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE, ResourceTransferSource.FILE_RESOURCE_SERVICE);
runner.setProperty(ResourceTransferProperties.FILE_RESOURCE_SERVICE, serviceId);

String filePath = "nonexistent.txt";
@ -320,18 +321,16 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
runner.run();
}

private MockFlowFile assertSuccess(String containerName, String blobName, byte[] blobData) throws Exception {
MockFlowFile flowFile = assertFlowFile(containerName, blobName, blobData);
private void assertSuccess(String containerName, String blobName, byte[] blobData) throws Exception {
assertFlowFile(containerName, blobName, blobData);
assertAzureBlob(containerName, blobName, blobData);
assertProvenanceEvents();
return flowFile;
}

private MockFlowFile assertSuccessForCSE(String containerName, String blobName, byte[] blobData) throws Exception {
MockFlowFile flowFile = assertFlowFile(containerName, blobName, blobData);
private void assertSuccessForCSE(String containerName, String blobName, byte[] blobData) throws Exception {
assertFlowFile(containerName, blobName, blobData);
assertAzureBlobExists(containerName, blobName);
assertProvenanceEvents();
return flowFile;
}

private MockFlowFile assertIgnored(String containerName, String blobName) throws Exception {
@ -343,7 +342,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
private MockFlowFile assertFlowFile(String containerName, String blobName, byte[] blobData) throws Exception {
runner.assertAllFlowFilesTransferred(PutAzureBlobStorage_v12.REL_SUCCESS, 1);

MockFlowFile flowFile = runner.getFlowFilesForRelationship(PutAzureBlobStorage_v12.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(PutAzureBlobStorage_v12.REL_SUCCESS).getFirst();

assertFlowFileCommonBlobAttributes(flowFile, containerName, blobName);
if (blobData != null) {
@ -378,7 +377,7 @@ public class ITPutAzureBlobStorage_v12 extends AbstractAzureBlobStorage_v12IT {
private MockFlowFile assertFailure(byte[] blobData, BlobErrorCode errorCode) throws Exception {
runner.assertAllFlowFilesTransferred(PutAzureBlobStorage_v12.REL_FAILURE, 1);

MockFlowFile flowFile = runner.getFlowFilesForRelationship(DeleteAzureBlobStorage_v12.REL_FAILURE).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(DeleteAzureBlobStorage_v12.REL_FAILURE).getFirst();
flowFile.assertContentEquals(blobData);
flowFile.assertAttributeEquals(ATTR_NAME_ERROR_CODE, errorCode.toString());
return flowFile;

@ -262,7 +262,7 @@ public class ITPutAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT {
runner.setProperty(service, StandardFileResourceService.FILE_PATH, String.format("${%s}", attributeName));
runner.enableControllerService(service);

runner.setProperty(ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE, ResourceTransferSource.FILE_RESOURCE_SERVICE.getValue());
runner.setProperty(ResourceTransferProperties.RESOURCE_TRANSFER_SOURCE, ResourceTransferSource.FILE_RESOURCE_SERVICE);
runner.setProperty(ResourceTransferProperties.FILE_RESOURCE_SERVICE, serviceId);

Path tempFilePath = Files.createTempFile("ITPutAzureDataLakeStorage_testPutFileFromLocalFile_", "");
@ -325,7 +325,7 @@ public class ITPutAzureDataLakeStorage extends AbstractAzureDataLakeStorageIT {
private MockFlowFile assertFlowFile(byte[] fileData) throws Exception {
runner.assertAllFlowFilesTransferred(PutAzureDataLakeStorage.REL_SUCCESS, 1);

MockFlowFile flowFile = runner.getFlowFilesForRelationship(PutAzureDataLakeStorage.REL_SUCCESS).get(0);
MockFlowFile flowFile = runner.getFlowFilesForRelationship(PutAzureDataLakeStorage.REL_SUCCESS).getFirst();

flowFile.assertContentEquals(fileData);

@ -161,6 +161,6 @@ public class TestClientSideEncryptionSupport {

private void assertContains(Collection<ValidationResult> result, String explaination) {
assertFalse(result.isEmpty(), "There should be validation error");
assertTrue(result.stream().filter(v -> v.getExplanation().contains(explaination)).findFirst().isPresent());
assertTrue(result.stream().anyMatch(v -> v.getExplanation().contains(explaination)));
}
}

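The change above collapses a filter/findFirst/isPresent chain into anyMatch. A minimal plain-Java sketch of the equivalence (example values are made up):

    import java.util.List;

    // anyMatch(p) short-circuits on the first match, exactly like
    // filter(p).findFirst().isPresent(), but states the intent in one call.
    class AnyMatchSketch {
        public static void main(String[] args) {
            List<String> explanations = List.of("key length invalid", "ok");
            boolean verbose = explanations.stream()
                    .filter(e -> e.contains("invalid")).findFirst().isPresent();
            boolean concise = explanations.stream()
                    .anyMatch(e -> e.contains("invalid"));
            System.out.println(verbose == concise); // true
        }
    }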
@ -30,7 +30,7 @@ public abstract class AbstractTestAzureQueueStorage_v12 {
protected void setupStorageCredentialsService() throws InitializationException {
runner.addControllerService(CREDENTIALS_SERVICE_IDENTIFIER, credentialsService);
runner.setProperty(credentialsService, AzureStorageCredentialsControllerService_v12.ACCOUNT_NAME, "account-name");
runner.setProperty(credentialsService, AzureStorageCredentialsControllerService_v12.CREDENTIALS_TYPE, AzureStorageCredentialsType.ACCOUNT_KEY.getAllowableValue());
runner.setProperty(credentialsService, AzureStorageCredentialsControllerService_v12.CREDENTIALS_TYPE, AzureStorageCredentialsType.ACCOUNT_KEY);
runner.setProperty(credentialsService, AzureStorageCredentialsControllerService_v12.ACCOUNT_KEY, "account-key");
}
}

@ -59,7 +59,7 @@ public class StandardKustoQueryServiceTest {
runner.addControllerService(SERVICE_ID, service);

runner.setProperty(service, StandardKustoQueryService.CLUSTER_URI, CLUSTER_URI);
runner.setProperty(service, StandardKustoQueryService.AUTHENTICATION_STRATEGY, KustoAuthenticationStrategy.MANAGED_IDENTITY.getValue());
runner.setProperty(service, StandardKustoQueryService.AUTHENTICATION_STRATEGY, KustoAuthenticationStrategy.MANAGED_IDENTITY);
runner.setProperty(service, StandardKustoQueryService.APPLICATION_CLIENT_ID, APPLICATION_CLIENT_ID);

runner.assertValid(service);
@ -72,7 +72,7 @@ public class StandardKustoQueryServiceTest {
runner.addControllerService(SERVICE_ID, service);

runner.setProperty(service, StandardKustoQueryService.CLUSTER_URI, CLUSTER_URI);
runner.setProperty(service, StandardKustoQueryService.AUTHENTICATION_STRATEGY, KustoAuthenticationStrategy.APPLICATION_CREDENTIALS.getValue());
runner.setProperty(service, StandardKustoQueryService.AUTHENTICATION_STRATEGY, KustoAuthenticationStrategy.APPLICATION_CREDENTIALS);
runner.setProperty(service, StandardKustoQueryService.APPLICATION_CLIENT_ID, APPLICATION_CLIENT_ID);
runner.setProperty(service, StandardKustoQueryService.APPLICATION_KEY, UUID.randomUUID().toString());
runner.setProperty(service, StandardKustoQueryService.APPLICATION_TENANT_ID, UUID.randomUUID().toString());

@ -24,13 +24,13 @@ import com.azure.messaging.eventhubs.models.CreateBatchOptions;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.WriteResult;
import org.apache.nifi.serialization.record.RecordSet;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.MockRecordWriter;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.serialization.record.RecordSet;
import org.apache.nifi.shared.azure.eventhubs.AzureEventHubTransportType;
import org.apache.nifi.util.NoOpProcessor;
import org.apache.nifi.util.TestRunner;
@ -42,19 +42,19 @@ import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.io.IOException;
import java.util.Map;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;

import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.isA;
import static org.mockito.Mockito.when;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.isA;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
public class TestAzureEventHubRecordSink {
@ -92,7 +92,7 @@ public class TestAzureEventHubRecordSink {
runner.setProperty(azureEventHubRecordSink, AzureEventHubRecordSink.EVENT_HUB_NAMESPACE, EVENT_HUB_NAMESPACE);
runner.setProperty(azureEventHubRecordSink, AzureEventHubRecordSink.SHARED_ACCESS_POLICY_KEY, POLICY_KEY);
runner.setProperty(azureEventHubRecordSink, AzureEventHubRecordSink.RECORD_WRITER_FACTORY, WRITER_IDENTIFIER);
runner.setProperty(azureEventHubRecordSink, AzureEventHubRecordSink.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS.getValue());
runner.setProperty(azureEventHubRecordSink, AzureEventHubRecordSink.TRANSPORT_TYPE, AzureEventHubTransportType.AMQP_WEB_SOCKETS);
runner.enableControllerService(azureEventHubRecordSink);
}

@ -256,7 +256,7 @@ public class TestAzureStorageCredentialsControllerService_v12 {
}

private void configureCredentialsType(AzureStorageCredentialsType credentialsType) {
runner.setProperty(credentialsService, CREDENTIALS_TYPE, credentialsType.getAllowableValue());
runner.setProperty(credentialsService, CREDENTIALS_TYPE, credentialsType);
}

private void configureAccountKey() {

@ -16,22 +16,6 @@
*/
package org.apache.nifi.reporting.azure.loganalytics;

import java.io.IOException;
import java.nio.charset.Charset;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.text.MessageFormat;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.regex.Pattern;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
@ -42,13 +26,28 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.reporting.AbstractReportingTask;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.text.MessageFormat;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Base64;
import java.util.List;
import java.util.regex.Pattern;

/**
* Abstract ReportingTask to send metrics from Apache NiFi and JVM to Azure
* Monitor.
*/
public abstract class AbstractAzureLogAnalyticsReportingTask extends AbstractReportingTask {

private static final Charset UTF8 = Charset.forName("UTF-8");
private static final Charset UTF8 = StandardCharsets.UTF_8;
private static final String HMAC_SHA256_ALG = "HmacSHA256";

// DateTimeFormatter.RFC_1123_DATE_TIME does not work in every case, such as when a
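A small sketch of the Charset.forName("UTF-8") to StandardCharsets.UTF_8 swap above (plain Java, not NiFi code): the constant avoids a runtime string lookup and removes any possibility of an UnsupportedCharsetException from a misspelled charset name.

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    // Both resolve to the UTF-8 Charset; the constant is checked at
    // compile time, while forName() is a runtime string lookup.
    class CharsetSketch {
        public static void main(String[] args) {
            Charset viaLookup = Charset.forName("UTF-8");
            Charset viaConstant = StandardCharsets.UTF_8;
            System.out.println(viaLookup.equals(viaConstant)); // true
        }
    }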
@ -90,6 +89,16 @@ public abstract class AbstractAzureLogAnalyticsReportingTask extends AbstractRep
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT).build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
LOG_ANALYTICS_WORKSPACE_ID,
LOG_ANALYTICS_WORKSPACE_KEY,
APPLICATION_ID,
INSTANCE_ID,
PROCESS_GROUP_IDS,
JOB_NAME,
LOG_ANALYTICS_URL_ENDPOINT_FORMAT
);

protected String createAuthorization(String workspaceId, String key, int contentLength, String rfc1123Date) {
try {
String signature = String.format("POST\n%d\napplication/json\nx-ms-date:%s\n/api/logs", contentLength,
@ -105,15 +114,7 @@ public abstract class AbstractAzureLogAnalyticsReportingTask extends AbstractRep

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(LOG_ANALYTICS_WORKSPACE_ID);
properties.add(LOG_ANALYTICS_WORKSPACE_KEY);
properties.add(APPLICATION_ID);
properties.add(INSTANCE_ID);
properties.add(PROCESS_GROUP_IDS);
properties.add(JOB_NAME);
properties.add(LOG_ANALYTICS_URL_ENDPOINT_FORMAT);
return properties;
return PROPERTIES;
}

/**
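The pattern above, repeated in the other reporting tasks in this commit, replaces a per-call ArrayList with one shared immutable list. A minimal plain-Java sketch of why List.of suits a static descriptor list:

    import java.util.List;

    // List.of(...) builds an unmodifiable list once at class load time;
    // every getSupportedPropertyDescriptors() call can return the same
    // instance, and accidental mutation fails fast.
    class ImmutableListSketch {
        private static final List<String> PROPERTIES = List.of("workspace-id", "workspace-key");

        public static void main(String[] args) {
            try {
                PROPERTIES.add("extra");
            } catch (UnsupportedOperationException e) {
                System.out.println("immutable, as intended");
            }
        }
    }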
@ -135,7 +136,7 @@ public abstract class AbstractAzureLogAnalyticsReportingTask extends AbstractRep
}

protected void sendToLogAnalytics(final HttpPost request, final String workspaceId, final String linuxPrimaryKey,
final String rawJson) throws IllegalArgumentException, RuntimeException, IOException {
final String rawJson) throws RuntimeException, IOException {

final int bodyLength = rawJson.getBytes(UTF8).length;
final ZonedDateTime zNow = ZonedDateTime.now(ZoneOffset.UTC);

@ -16,29 +16,6 @@
*/
package org.apache.nifi.reporting.azure.loganalytics;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.text.DateFormat;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.UUID;

import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonBuilderFactory;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonValue;

import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.methods.HttpPost;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
@ -54,6 +31,27 @@ import org.apache.nifi.provenance.ProvenanceEventType;
import org.apache.nifi.reporting.ReportingContext;
import org.apache.nifi.reporting.util.provenance.ProvenanceEventConsumer;

import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonBuilderFactory;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonValue;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.text.DateFormat;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.UUID;

@Tags({ "azure", "provenace", "reporting", "log analytics" })
@CapabilityDescription("Publishes Provenance events to to a Azure Log Analytics workspace.")
public class AzureLogAnalyticsProvenanceReportingTask extends AbstractAzureLogAnalyticsReportingTask {
@ -145,7 +143,7 @@ public class AzureLogAnalyticsProvenanceReportingTask extends AbstractAzureLogAn
.description("If the Reporting Task has never been run, or if its state has been reset by a user, "
+ "specifies where in the stream of Provenance Events the Reporting Task should start")
.allowableValues(BEGINNING_OF_STREAM, END_OF_STREAM)
.defaultValue(BEGINNING_OF_STREAM.getValue()).required(true).build();
.defaultValue(BEGINNING_OF_STREAM).required(true).build();

static final PropertyDescriptor ALLOW_NULL_VALUES = new PropertyDescriptor.Builder().name("include-null-values")
.displayName("Include Null Values")
@ -169,32 +167,34 @@ public class AzureLogAnalyticsProvenanceReportingTask extends AbstractAzureLogAn
.description("Specifies how many records to send in a single batch, at most.").required(true)
.defaultValue("1000").addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR).build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
LOG_ANALYTICS_WORKSPACE_ID,
LOG_ANALYTICS_CUSTOM_LOG_NAME,
LOG_ANALYTICS_WORKSPACE_KEY,
APPLICATION_ID,
INSTANCE_ID,
JOB_NAME,
LOG_ANALYTICS_URL_ENDPOINT_FORMAT,
FILTER_EVENT_TYPE,
FILTER_EVENT_TYPE_EXCLUDE,
FILTER_COMPONENT_TYPE,
FILTER_COMPONENT_TYPE_EXCLUDE,
FILTER_COMPONENT_ID,
FILTER_COMPONENT_ID_EXCLUDE,
FILTER_COMPONENT_NAME,
FILTER_COMPONENT_NAME_EXCLUDE,
START_POSITION,
ALLOW_NULL_VALUES,
PLATFORM,
INSTANCE_URL,
BATCH_SIZE
);

private volatile ProvenanceEventConsumer consumer;

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(LOG_ANALYTICS_WORKSPACE_ID);
properties.add(LOG_ANALYTICS_CUSTOM_LOG_NAME);
properties.add(LOG_ANALYTICS_WORKSPACE_KEY);
properties.add(APPLICATION_ID);
properties.add(INSTANCE_ID);
properties.add(JOB_NAME);
properties.add(LOG_ANALYTICS_URL_ENDPOINT_FORMAT);
properties.add(FILTER_EVENT_TYPE);
properties.add(FILTER_EVENT_TYPE_EXCLUDE);
properties.add(FILTER_COMPONENT_TYPE);
properties.add(FILTER_COMPONENT_TYPE_EXCLUDE);
properties.add(FILTER_COMPONENT_ID);
properties.add(FILTER_COMPONENT_ID_EXCLUDE);
properties.add(FILTER_COMPONENT_NAME);
properties.add(FILTER_COMPONENT_NAME_EXCLUDE);
properties.add(START_POSITION);
properties.add(ALLOW_NULL_VALUES);
properties.add(PLATFORM);
properties.add(INSTANCE_URL);
properties.add(BATCH_SIZE);
return properties;
return PROPERTIES;
}

public void CreateConsumer(final ReportingContext context) {
@ -406,20 +406,15 @@ public class AzureLogAnalyticsProvenanceReportingTask extends AbstractAzureLogAn

public static void addField(final JsonObjectBuilder builder, final String key, final Object value,
boolean allowNullValues) {
if (value != null) {
if (value instanceof String) {
builder.add(key, (String) value);
} else if (value instanceof Integer) {
builder.add(key, (Integer) value);
} else if (value instanceof Boolean) {
builder.add(key, (Boolean) value);
} else if (value instanceof Long) {
builder.add(key, (Long) value);
} else {
builder.add(key, value.toString());
switch (value) {
case String s -> builder.add(key, s);
case Integer i -> builder.add(key, i);
case Boolean b -> builder.add(key, b);
case Long l -> builder.add(key, l);
case null -> {
if (allowNullValues) builder.add(key, JsonValue.NULL);
}
} else if (allowNullValues) {
builder.add(key, JsonValue.NULL);
default -> builder.add(key, value.toString());
}
}

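The hunk above replaces an instanceof/else chain with pattern matching for switch (standardized in Java 21). A minimal, self-contained sketch of the same construct; the describe method and example values are illustrative, not NiFi code:

    // Each case binds the matched value to a typed variable, and `case null`
    // absorbs the null check that the old if/else version did up front.
    class SwitchPatternSketch {
        static String describe(Object value) {
            return switch (value) {
                case String s -> "String: " + s;
                case Integer i -> "Integer: " + i;
                case Long l -> "Long: " + l;
                case Boolean b -> "Boolean: " + b;
                case null -> "null value";
                default -> "other: " + value;
            };
        }

        public static void main(String[] args) {
            System.out.println(describe("abc"));  // String: abc
            System.out.println(describe(42));     // Integer: 42
            System.out.println(describe(null));   // null value
        }
    }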
@ -428,10 +423,8 @@ public class AzureLogAnalyticsProvenanceReportingTask extends AbstractAzureLogAn
if (values != null) {
final JsonObjectBuilder mapBuilder = factory.createObjectBuilder();
for (final Map.Entry<String, String> entry : values.entrySet()) {

if (entry.getKey() == null) {
continue;
} else if (entry.getValue() == null) {
if (entry.getKey() == null) continue;
if (entry.getValue() == null) {
if (allowNullValues) {
mapBuilder.add(entry.getKey(), JsonValue.NULL);
}
@ -441,7 +434,6 @@ public class AzureLogAnalyticsProvenanceReportingTask extends AbstractAzureLogAn
}

builder.add(key, mapBuilder);

} else if (allowNullValues) {
builder.add(key, JsonValue.NULL);
}

@ -16,13 +16,8 @@
*/
package org.apache.nifi.reporting.azure.loganalytics;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import org.apache.http.client.methods.HttpPost;
import org.apache.nifi.annotation.configuration.DefaultSchedule;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
@ -39,6 +34,10 @@ import org.apache.nifi.reporting.ReportingContext;
import org.apache.nifi.reporting.azure.loganalytics.api.AzureLogAnalyticsMetricsFactory;
import org.apache.nifi.scheduling.SchedulingStrategy;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
* ReportingTask to send metrics from Apache NiFi and JVM to Azure Monitor.
*/
@ -59,37 +58,36 @@ public class AzureLogAnalyticsReportingTask extends AbstractAzureLogAnalyticsRep
.defaultValue("nifimetrics").addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT).build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
SEND_JVM_METRICS,
LOG_ANALYTICS_WORKSPACE_ID,
LOG_ANALYTICS_CUSTOM_LOG_NAME,
LOG_ANALYTICS_WORKSPACE_KEY,
APPLICATION_ID,
INSTANCE_ID,
PROCESS_GROUP_IDS,
JOB_NAME,
LOG_ANALYTICS_URL_ENDPOINT_FORMAT
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(SEND_JVM_METRICS);
properties.add(LOG_ANALYTICS_WORKSPACE_ID);
properties.add(LOG_ANALYTICS_CUSTOM_LOG_NAME);
properties.add(LOG_ANALYTICS_WORKSPACE_KEY);
properties.add(APPLICATION_ID);
properties.add(INSTANCE_ID);
properties.add(PROCESS_GROUP_IDS);
properties.add(JOB_NAME);
properties.add(LOG_ANALYTICS_URL_ENDPOINT_FORMAT);
return properties;
return PROPERTIES;
}

@Override
public void onTrigger(final ReportingContext context) {
final String workspaceId = context.getProperty(LOG_ANALYTICS_WORKSPACE_ID).evaluateAttributeExpressions()
.getValue();
final String linuxPrimaryKey = context.getProperty(LOG_ANALYTICS_WORKSPACE_KEY).evaluateAttributeExpressions()
.getValue();
final String workspaceId = context.getProperty(LOG_ANALYTICS_WORKSPACE_ID).evaluateAttributeExpressions().getValue();
final String linuxPrimaryKey = context.getProperty(LOG_ANALYTICS_WORKSPACE_KEY).evaluateAttributeExpressions().getValue();
final boolean jvmMetricsCollected = context.getProperty(SEND_JVM_METRICS).asBoolean();
final String logName = context.getProperty(LOG_ANALYTICS_CUSTOM_LOG_NAME).evaluateAttributeExpressions()
.getValue();
final String logName = context.getProperty(LOG_ANALYTICS_CUSTOM_LOG_NAME).evaluateAttributeExpressions().getValue();
final String instanceId = context.getProperty(INSTANCE_ID).evaluateAttributeExpressions().getValue();
final String groupIds = context.getProperty(PROCESS_GROUP_IDS).evaluateAttributeExpressions().getValue();
final String urlEndpointFormat = context.getProperty(LOG_ANALYTICS_URL_ENDPOINT_FORMAT)
.evaluateAttributeExpressions().getValue();

try {
List<Metric> allMetrics = null;
final List<Metric> allMetrics;
if (groupIds == null || groupIds.isEmpty()) {
ProcessGroupStatus status = context.getEventAccess().getControllerStatus();
String processGroupName = status.getName();

|
@ -41,13 +41,13 @@ public class Metric {
|
|||
}
|
||||
|
||||
public void setCount(long value){
|
||||
this.count = Long.valueOf((long)value);
|
||||
this.count = (long) value;
|
||||
}
|
||||
public void setCount(double value){
|
||||
this.count = Long.valueOf((long)value);
|
||||
this.count = (long) value;
|
||||
}
|
||||
public void setCount(int value){
|
||||
this.count = Long.valueOf((long)value);
|
||||
this.count = (long) value;
|
||||
}
|
||||
|
||||
public Long getCount() {
|
||||
|
|
|
@ -22,7 +22,7 @@ import java.util.List;
/**
* MetricsBuilder builds the list of metrics
*/
public class MetricsBuilder{
public class MetricsBuilder {
private List<Metric> metrics = new ArrayList<>();

private String computer;
@ -34,29 +34,28 @@ public class MetricsBuilder{
private boolean isProcessorMetric = false;
private String tags = null;

public MetricsBuilder(String category, String instanceId, String processGroupId, String processGroupName) {
this.computer = instanceId;
this.processGroupName = processGroupName;
this.processGroupId = processGroupId;
this.categoryName = category;
if (category.equals(Metric.CATEGORY_PROCESSOR)){
if (category.equals(Metric.CATEGORY_PROCESSOR)) {
isProcessorMetric = true;
}
}

public MetricsBuilder(String category, String instanceId, String processGroupId, String processGroupName, String processorId, String processorName) {
this(category, instanceId,processGroupId,processGroupName);
this(category, instanceId, processGroupId, processGroupName);
this.processorId = processorId;
this.processorName =processorName;
this.processorName = processorName;
}

public MetricsBuilder setProcessorId(String processorId){
public MetricsBuilder setProcessorId(String processorId) {
this.processorId = processorId;
return this;
}

public MetricsBuilder setProcessorName(String processorName){
public MetricsBuilder setProcessorName(String processorName) {
this.processorName = processorName;
return this;
}
@ -66,69 +65,33 @@ public class MetricsBuilder{
return this;
}

public MetricsBuilder metric(String metricName, long count){
Metric metric = null;
if(isProcessorMetric) {
metric = new Metric(this.computer, this.processGroupId, this.processGroupName);
metric.setProcessorId(this.processorId);
metric.setProcessorName(this.processorName);
} else {
metric = new Metric(this.computer, this.processGroupId, this.processGroupName);
}
metric.setCategoryName(this.categoryName);
metric.setName(metricName);
public MetricsBuilder metric(String metricName, long count) {
final Metric metric = buildMetric(metricName);
metric.setCount(count);
if(this.tags != null) {
metric.setTags(this.tags);
}
metrics.add(metric);
return this;
}

public MetricsBuilder metric(String metricName, double count){
Metric metric = null;
if(isProcessorMetric) {
metric = new Metric(this.computer, this.processGroupId, this.processGroupName);
metric.setProcessorId(this.processorId);
metric.setProcessorName(this.processorName);
} else {
metric = new Metric(this.computer, this.processGroupId, this.processGroupName);
}
metric.setCategoryName(this.categoryName);
metric.setName(metricName);
public MetricsBuilder metric(String metricName, double count) {
final Metric metric = buildMetric(metricName);
metric.setCount(count);
if(this.tags != null) {
metric.setTags(this.tags);
}
metrics.add(metric);
return this;
}

public MetricsBuilder metric(String metricName, int count) {
Metric metric = null;
if(isProcessorMetric) {
metric = new Metric(this.computer, this.processGroupId, this.processGroupName);
metric.setProcessorId(this.processorId);
metric.setProcessorName(this.processorName);
} else {
metric = new Metric(this.computer, this.processGroupId, this.processGroupName);
}
metric.setCategoryName(this.categoryName);
metric.setName(metricName);
final Metric metric = buildMetric(metricName);
metric.setCount(count);
if(this.tags != null) {
metric.setTags(this.tags);
}
metrics.add(metric);
return this;
}

public List<Metric> build() {
return metrics;
}
public List<Metric> getMetrics() {
return this.metrics;
}

public void setMetrics(List<Metric> metrics) {
this.metrics = metrics;
}
@ -191,5 +154,17 @@ public class MetricsBuilder{
return this.tags;
}

private Metric buildMetric(String metricName) {
final Metric metric = new Metric(this.computer, this.processGroupId, this.processGroupName);
if (this.isProcessorMetric) {
metric.setProcessorId(this.processorId);
metric.setProcessorName(this.processorName);
}
metric.setCategoryName(this.categoryName);
metric.setName(metricName);
if (this.tags != null) {
metric.setTags(this.tags);
}
return metric;
}
}

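The refactor above gathers the construction logic duplicated across the three metric(...) overloads into a single buildMetric(...) helper. A hypothetical usage sketch against the builder API exactly as shown in this diff (the id and name strings are made-up examples):

    // All three overloads (long, double, int) now funnel through
    // buildMetric(...), so processor id/name, category, and tag handling
    // live in one place.
    List<Metric> metrics = new MetricsBuilder(Metric.CATEGORY_PROCESSOR, "instance-1", "pg-1", "root")
            .setProcessorId("proc-1")
            .setProcessorName("PutAzureBlobStorage")
            .metric("flowFilesSent", 42L)
            .metric("averageLineageDuration", 1.5)
            .build();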
@ -153,7 +153,7 @@ public class TestAzureLogAnalyticsReportingTask {
testGroupStatus2.setInputCount(1000);
}
@Test
public void testOnTrigger() throws IOException, InterruptedException, InitializationException {
public void testOnTrigger() throws InitializationException {
testedReportingTask.initialize(reportingInitContextStub);
reportingContextStub.getEventAccess().setProcessGroupStatus(rootGroupStatus);
testedReportingTask.onTrigger(reportingContextStub);
@ -162,7 +162,7 @@ public class TestAzureLogAnalyticsReportingTask {
TestVerification.assertDatatFlowMetrics(collectedMetrics);
}
@Test
public void testOnTriggerWithOnePG() throws IOException, InterruptedException, InitializationException {
public void testOnTriggerWithOnePG() throws InitializationException {
initTestGroupStatuses();
reportingContextStub.setProperty(AzureLogAnalyticsReportingTask.PROCESS_GROUP_IDS.getName(), TEST_GROUP1_ID);
testedReportingTask.initialize(reportingInitContextStub);
@ -174,7 +174,7 @@ public class TestAzureLogAnalyticsReportingTask {
TestVerification.assertDatatFlowMetrics(collectedMetrics);
}
@Test
public void testOnTriggerWithPGList() throws IOException, InterruptedException, InitializationException {
public void testOnTriggerWithPGList() throws InitializationException {
initTestGroupStatuses();
initTestGroup2Statuses();
reportingContextStub.setProperty(AzureLogAnalyticsReportingTask.PROCESS_GROUP_IDS.getName(),
@ -190,7 +190,7 @@ public class TestAzureLogAnalyticsReportingTask {
}

@Test
public void testEmitJVMMetrics() throws IOException, InterruptedException, InitializationException {
public void testEmitJVMMetrics() throws InitializationException {
reportingContextStub.setProperty(AzureLogAnalyticsReportingTask.SEND_JVM_METRICS.getName(), "true");
testedReportingTask.initialize(reportingInitContextStub);

@ -202,7 +202,7 @@ public class TestAzureLogAnalyticsReportingTask {
}

@Test
public void testAuthorization() throws IOException, InterruptedException, InitializationException {
public void testAuthorization() throws InitializationException {

reportingContextStub.setProperty(AzureLogAnalyticsReportingTask.SEND_JVM_METRICS.getName(), "true");
testedReportingTask.initialize(reportingInitContextStub);

@ -20,11 +20,7 @@ import org.apache.nifi.components.DescribedValue;

public enum AzureStorageConflictResolutionStrategy implements DescribedValue {
FAIL_RESOLUTION("fail", "Fail if the blob already exists"),
IGNORE_RESOLUTION("ignore",
String.format(
"Ignore if the blob already exists; the 'azure.error' attribute will be set to the value 'BLOB_ALREADY_EXISTS'"
)
),
IGNORE_RESOLUTION("ignore", "Ignore if the blob already exists; the 'azure.error' attribute will be set to the value 'BLOB_ALREADY_EXISTS'"),
REPLACE_RESOLUTION("replace", "Replace blob contents if the blob already exist");

private final String label;

@ -16,9 +16,9 @@
*/
package org.apache.nifi.services.azure.storage;

import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.DescribedValue;

public enum AzureStorageCredentialsType {
public enum AzureStorageCredentialsType implements DescribedValue {

ACCOUNT_KEY("Account Key", "The primary or secondary Account Key of the storage account that provides full access to the resources in the account"),
SAS_TOKEN("SAS Token", "SAS (Shared Access Signature) Token generated for accessing resources in the storage account"),
@ -34,9 +34,18 @@ public enum AzureStorageCredentialsType {
this.description = description;
}

public AllowableValue getAllowableValue() {
return new AllowableValue(name(), label, description);
@Override
public String getValue() {
return this.name();
}

}
@Override
public String getDisplayName() {
return this.label;
}

@Override
public String getDescription() {
return this.description;
}
}
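This enum change is the pattern behind most of the test simplifications earlier in the diff: once an enum implements NiFi's DescribedValue, it can be passed to property-setting and builder methods directly instead of going through getAllowableValue() or getValue(). A minimal, self-contained sketch of the shape, assuming only the three-method interface visible in the hunk above (the Described interface and CredentialsKind enum here are illustrative stand-ins, not NiFi classes):

    // Sketch of an enum implementing a DescribedValue-style interface:
    // a stable value, a display name, and a description per constant.
    interface Described {
        String getValue();
        String getDisplayName();
        String getDescription();
    }

    enum CredentialsKind implements Described {
        ACCOUNT_KEY("Account Key", "Full-access storage account key"),
        SAS_TOKEN("SAS Token", "Scoped shared access signature");

        private final String label;
        private final String description;

        CredentialsKind(String label, String description) {
            this.label = label;
            this.description = description;
        }

        @Override public String getValue() { return name(); }
        @Override public String getDisplayName() { return label; }
        @Override public String getDescription() { return description; }
    }

    class DescribedValueSketch {
        public static void main(String[] args) {
            for (CredentialsKind kind : CredentialsKind.values()) {
                System.out.println(kind.getValue() + " / " + kind.getDisplayName());
            }
        }
    }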