Mirror of https://github.com/apache/nifi.git (synced 2025-03-01 15:09:11 +00:00)
NIFI-13510 Removed KerberosCredentialsService and implementations
- Removed nifi-kerberos-credentials-service-api and NAR
- Removed individual Kerberos credentials properties
- Removed NIFI_ALLOW_EXPLICIT_KEYTAB from nifi-env.sh
- Removed unused references to NiFi Properties in tests

This closes #9058

Signed-off-by: Joseph Witt <joewitt@apache.org>
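For components that referenced the removed KerberosCredentialsService, the replacement path is the KerberosUserService controller service API. Below is a minimal sketch of the lookup pattern the hunks in this commit migrate to; the property descriptor and service resolution mirror the diff, while the wrapper class and method names are hypothetical and not part of this commit:

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.context.PropertyContext;
    import org.apache.nifi.kerberos.KerberosUserService;
    import org.apache.nifi.security.krb.KerberosUser;

    public class KerberosUserServiceSketch {

        // A single Kerberos User Service property replaces the removed principal,
        // keytab, password, and Kerberos Credentials Service properties.
        static final PropertyDescriptor KERBEROS_USER_SERVICE = new PropertyDescriptor.Builder()
                .name("kerberos-user-service")
                .displayName("Kerberos User Service")
                .description("Specifies the Kerberos User Controller Service that should be used for authentication")
                .identifiesControllerService(KerberosUserService.class)
                .required(false)
                .build();

        // Resolve a KerberosUser from the configured service; null means Kerberos is not configured.
        static KerberosUser getKerberosUser(final PropertyContext context) {
            final KerberosUserService service = context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
            return service == null ? null : service.createKerberosUser();
        }
    }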
parent 057e02e698
commit 4006cb7849
@@ -268,12 +268,6 @@ limitations under the License.
             <version>2.0.0-SNAPSHOT</version>
             <type>nar</type>
         </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-nar</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-            <type>nar</type>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-runtime</artifactId>
@@ -682,12 +682,6 @@ language governing permissions and limitations under the License. -->
             <version>2.0.0-SNAPSHOT</version>
             <type>nar</type>
         </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-nar</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-            <type>nar</type>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-kerberos-user-service-nar</artifactId>
@@ -855,11 +855,6 @@
             <artifactId>nifi-hadoop-utils</artifactId>
             <version>2.0.0-SNAPSHOT</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-test-utils</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-listed-entity</artifactId>
@@ -1442,16 +1437,6 @@
             <artifactId>nifi-http-context-map</artifactId>
             <version>2.0.0-SNAPSHOT</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -204,16 +204,13 @@ public abstract class AbstractDBCPConnectionPool extends AbstractControllerService
         final KerberosUser kerberosUser;
         final KerberosUserService kerberosUserService = context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
 
-        if (kerberosUserService != null) {
-            kerberosUser = kerberosUserService.createKerberosUser();
+        if (kerberosUserService == null) {
+            kerberosUser = null;
         } else {
-            kerberosUser = getKerberosUserByCredentials(context);
+            kerberosUser = kerberosUserService.createKerberosUser();
         }
-        return kerberosUser;
-    }
 
-    protected KerberosUser getKerberosUserByCredentials(final ConfigurationContext context) {
-        return null;
+        return kerberosUser;
     }
 
 
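The hunk above inverts the branch in AbstractDBCPConnectionPool: when no KerberosUserService is configured, getKerberosUser now yields null instead of consulting the removed getKerberosUserByCredentials fallback. A caller-side sketch of that contract (the helper class is hypothetical; KerberosUser is the existing NiFi API):

    import org.apache.nifi.security.krb.KerberosUser;

    final class KerberosLoginSketch {
        static void loginIfConfigured(final KerberosUser kerberosUser) {
            if (kerberosUser == null) {
                return; // no Kerberos User Service configured; connect without Kerberos
            }
            kerberosUser.login(); // performs the JAAS login for the configured principal
        }
    }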
@@ -29,11 +29,6 @@
             <artifactId>nifi-security-kerberos</artifactId>
             <version>2.0.0-SNAPSHOT</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hadoop;
-
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
-
-import javax.security.auth.login.AppConfigurationEntry;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Modified Kerberos configuration class from {@link org.apache.hadoop.security.authentication.client.KerberosAuthenticator.KerberosConfiguration}
- * that requires authentication from a keytab.
- */
-public class KerberosConfiguration extends javax.security.auth.login.Configuration {
-
-    private static final Map<String, String> USER_KERBEROS_OPTIONS = new HashMap<>();
-    private static final AppConfigurationEntry USER_KERBEROS_LOGIN;
-    private static final AppConfigurationEntry[] USER_KERBEROS_CONF;
-
-    KerberosConfiguration(String principal, String keytab) {
-        USER_KERBEROS_OPTIONS.put("principal", principal);
-        USER_KERBEROS_OPTIONS.put("keyTab", keytab);
-    }
-
-    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
-        return USER_KERBEROS_CONF;
-    }
-
-    static {
-        USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
-        USER_KERBEROS_OPTIONS.put("useKeyTab", "true");
-        USER_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
-        USER_KERBEROS_LOGIN = new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(), AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, USER_KERBEROS_OPTIONS);
-        USER_KERBEROS_CONF = new AppConfigurationEntry[]{USER_KERBEROS_LOGIN};
-    }
-
-}
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hadoop;
-
-import org.apache.http.auth.Credentials;
-
-import javax.security.auth.kerberos.KerberosPrincipal;
-import java.security.Principal;
-
-/**
- * Crendentials that incorporate a user principal and a keytab file.
- */
-public class KerberosKeytabCredentials implements Credentials {
-
-    private final KerberosPrincipal userPrincipal;
-    private final String keytab;
-
-    public KerberosKeytabCredentials(String principalName, String keytab) {
-        this.userPrincipal = new KerberosPrincipal(principalName);
-        this.keytab = keytab;
-    }
-
-    @Override
-    public Principal getUserPrincipal() {
-        return userPrincipal;
-    }
-
-    @Override
-    public String getPassword() {
-        return null;
-    }
-
-    public String getKeytab() {
-        return keytab;
-    }
-
-}
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hadoop;
-
-import org.apache.http.auth.AuthScheme;
-import org.apache.http.auth.AuthSchemeProvider;
-import org.apache.http.protocol.HttpContext;
-
-/**
- * Provider class for KerberosKeytabSPNegoAuthScheme.
- */
-public class KerberosKeytabSPNegoAuthSchemeProvider implements AuthSchemeProvider {
-
-    public AuthScheme create(HttpContext context) {
-        return new KerberosKeytabSPNegoScheme();
-    }
-
-}
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hadoop;
-
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.http.auth.Credentials;
-import org.apache.http.impl.auth.SPNegoScheme;
-import org.ietf.jgss.GSSContext;
-import org.ietf.jgss.GSSException;
-import org.ietf.jgss.GSSManager;
-import org.ietf.jgss.GSSName;
-
-import javax.security.auth.Subject;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-import java.net.UnknownHostException;
-import java.security.Principal;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.concurrent.Callable;
-
-/**
- * This class provides a very similar authentication scheme and token generation as {@link SPNegoScheme} does.
- * The token generation is based on a keytab file coming from {@link KerberosKeytabCredentials} and the process
- * uses hadoop-auth tools.
- */
-public class KerberosKeytabSPNegoScheme extends SPNegoScheme {
-
-    public KerberosKeytabSPNegoScheme() {
-        super(true, false);
-    }
-
-    @Override
-    public byte[] generateToken(byte[] input, String authServer, Credentials credentials) {
-        Set<Principal> principals = new HashSet<>();
-        principals.add(credentials.getUserPrincipal());
-        Subject subject = new Subject(false, principals, new HashSet<>(), new HashSet<>());
-
-        try {
-            LoginContext loginContext = new LoginContext("", subject, null,
-                    new KerberosConfiguration(credentials.getUserPrincipal().getName(),
-                            ((KerberosKeytabCredentials) credentials).getKeytab()));
-            loginContext.login();
-            Subject loggedInSubject = loginContext.getSubject();
-
-            return Subject.callAs(loggedInSubject, new Callable<byte[]>() {
-
-                public byte[] call() throws UnknownHostException, GSSException {
-                    final GSSManager gssManager = GSSManager.getInstance();
-                    final String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP", authServer);
-                    final GSSName serviceName = gssManager.createName(servicePrincipal, KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID);
-                    final GSSContext gssContext = gssManager.createContext(serviceName, KerberosUtil.GSS_KRB5_MECH_OID, null, 0);
-                    gssContext.requestCredDeleg(true);
-                    gssContext.requestMutualAuth(true);
-                    return gssContext.initSecContext(input, 0, input.length);
-                }
-
-            });
-        } catch (final LoginException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-}
@@ -1,174 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hadoop;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.ValidationContext;
-import org.apache.nifi.components.ValidationResult;
-import org.apache.nifi.components.Validator;
-import org.apache.nifi.components.resource.ResourceCardinality;
-import org.apache.nifi.components.resource.ResourceType;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.logging.ComponentLog;
-import org.apache.nifi.processor.util.StandardValidators;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * All processors and controller services that need properties for Kerberos
- * Principal and Keytab should obtain them through this class by calling:
- *
- * KerberosProperties props =
- *     KerberosProperties.create(NiFiProperties.getInstance())
- *
- * The properties can be accessed from the resulting KerberosProperties
- * instance.
- */
-public class KerberosProperties {
-
-    private final File kerberosConfigFile;
-    private final Validator kerberosConfigValidator;
-    private final PropertyDescriptor kerberosPrincipal;
-    private final PropertyDescriptor kerberosKeytab;
-    private final PropertyDescriptor kerberosPassword;
-
-    /**
-     * Instantiate a KerberosProperties object but keep in mind it is
-     * effectively a singleton because the krb5.conf file needs to be set as a
-     * system property which this constructor will take care of.
-     *
-     * @param kerberosConfigFile file of krb5.conf
-     */
-    public KerberosProperties(final File kerberosConfigFile) {
-        this.kerberosConfigFile = kerberosConfigFile;
-
-        this.kerberosConfigValidator = new Validator() {
-            @Override
-            public ValidationResult validate(String subject, String input, ValidationContext context) {
-                // Check that the Kerberos configuration is set
-                if (kerberosConfigFile == null) {
-                    return new ValidationResult.Builder()
-                            .subject(subject).input(input).valid(false)
-                            .explanation("you are missing the nifi.kerberos.krb5.file property which "
-                                    + "must be set in order to use Kerberos")
-                            .build();
-                }
-
-                // Check that the Kerberos configuration is readable
-                if (!kerberosConfigFile.canRead()) {
-                    return new ValidationResult.Builder().subject(subject).input(input).valid(false)
-                            .explanation(String.format("unable to read Kerberos config [%s], please make sure the path is valid "
-                                    + "and nifi has adequate permissions", kerberosConfigFile.getAbsoluteFile()))
-                            .build();
-                }
-
-                return new ValidationResult.Builder().subject(subject).input(input).valid(true).build();
-            }
-        };
-
-        this.kerberosPrincipal = new PropertyDescriptor.Builder()
-                .name("Kerberos Principal")
-                .required(false)
-                .description("Kerberos principal to authenticate as. Requires nifi.kerberos.krb5.file to be set in your nifi.properties")
-                .addValidator(kerberosConfigValidator)
-                .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
-                .build();
-
-        this.kerberosKeytab = new PropertyDescriptor.Builder()
-                .name("Kerberos Keytab")
-                .required(false)
-                .description("Kerberos keytab associated with the principal. Requires nifi.kerberos.krb5.file to be set in your nifi.properties")
-                .identifiesExternalResource(ResourceCardinality.SINGLE, ResourceType.FILE)
-                .addValidator(kerberosConfigValidator)
-                .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
-                .build();
-
-        this.kerberosPassword = new PropertyDescriptor.Builder()
-                .name("Kerberos Password")
-                .required(false)
-                .description("Kerberos password associated with the principal.")
-                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-                .sensitive(true)
-                .build();
-    }
-
-    public File getKerberosConfigFile() {
-        return kerberosConfigFile;
-    }
-
-    public Validator getKerberosConfigValidator() {
-        return kerberosConfigValidator;
-    }
-
-    public PropertyDescriptor getKerberosPrincipal() {
-        return kerberosPrincipal;
-    }
-
-    public PropertyDescriptor getKerberosKeytab() {
-        return kerberosKeytab;
-    }
-
-    public PropertyDescriptor getKerberosPassword() {
-        return kerberosPassword;
-    }
-
-    public static List<ValidationResult> validatePrincipalWithKeytabOrPassword(final String subject, final Configuration config, final String principal, final String keytab,
-                                                                               final String password, final ComponentLog logger) {
-        final List<ValidationResult> results = new ArrayList<>();
-
-        // if security is enabled then the keytab and principal are required
-        final boolean isSecurityEnabled = SecurityUtil.isSecurityEnabled(config);
-
-        final boolean blankPrincipal = (principal == null || principal.isEmpty());
-        if (isSecurityEnabled && blankPrincipal) {
-            results.add(new ValidationResult.Builder()
-                    .valid(false)
-                    .subject(subject)
-                    .explanation("Kerberos Principal must be provided when using a secure configuration")
-                    .build());
-        }
-
-        final boolean blankKeytab = (keytab == null || keytab.isEmpty());
-        final boolean blankPassword = (password == null || password.isEmpty());
-
-        if (isSecurityEnabled && blankKeytab && blankPassword) {
-            results.add(new ValidationResult.Builder()
-                    .valid(false)
-                    .subject(subject)
-                    .explanation("Kerberos Keytab or Kerberos Password must be provided when using a secure configuration")
-                    .build());
-        }
-
-        if (isSecurityEnabled && !blankKeytab && !blankPassword) {
-            results.add(new ValidationResult.Builder()
-                    .valid(false)
-                    .subject(subject)
-                    .explanation("Cannot specify both a Kerberos Keytab and a Kerberos Password")
-                    .build());
-        }
-
-        if (!isSecurityEnabled && (!blankPrincipal || !blankKeytab)) {
-            logger.warn("Configuration does not have security enabled, Keytab and Principal will be ignored");
-        }
-
-        return results;
-    }
-
-}
@@ -31,7 +31,6 @@ import java.security.AccessController;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
 import java.util.Objects;
-import java.util.Random;
 import java.util.stream.Collectors;
 
 /**
@@ -49,17 +48,10 @@ public class SecurityUtil {
      * <p/>
      * As of Apache NiFi 1.5.0, this method uses {@link UserGroupInformation#loginUserFromKeytab(String, String)} to
      * authenticate the given <code>principal</code>, which sets the static variable <code>loginUser</code> in the
-     * {@link UserGroupInformation} instance. Setting <code>loginUser</code> is necessary for
-     * {@link org.apache.hadoop.ipc.Client.Connection#handleSaslConnectionFailure(int, int, Exception, Random, UserGroupInformation)}
-     * to be able to attempt a relogin during a connection failure. The <code>handleSaslConnectionFailure</code> method
+     * {@link UserGroupInformation} instance.
      * calls <code>UserGroupInformation.getLoginUser().reloginFromKeytab()</code> statically, which can return null
      * if <code>loginUser</code> is not set, resulting in failure of the hadoop operation.
      * <p/>
-     * In previous versions of NiFi, {@link UserGroupInformation#loginUserFromKeytabAndReturnUGI(String, String)} was
-     * used to authenticate the <code>principal</code>, which does not set <code>loginUser</code>, making it impossible
-     * for a
-     * {@link org.apache.hadoop.ipc.Client.Connection#handleSaslConnectionFailure(int, int, Exception, Random, UserGroupInformation)}
-     * to be able to implicitly relogin the principal.
      *
      * @param config the configuration instance
      * @param principal the principal to authenticate as
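The retained javadoc above explains why SecurityUtil authenticates through UserGroupInformation#loginUserFromKeytab: it sets the static loginUser that Hadoop later reads when it retries a failed SASL connection. A minimal sketch of that Hadoop login pattern, with placeholder principal and keytab values:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    import java.io.IOException;

    final class UgiLoginSketch {
        static UserGroupInformation login(final Configuration config) throws IOException {
            UserGroupInformation.setConfiguration(config);
            // Sets the static loginUser so that a later relogin via
            // UserGroupInformation.getLoginUser().reloginFromKeytab() can succeed
            UserGroupInformation.loginUserFromKeytab("nifi@EXAMPLE.COM", "/etc/security/keytabs/nifi.keytab");
            return UserGroupInformation.getLoginUser();
        }
    }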
@@ -37,24 +37,19 @@ import org.apache.nifi.components.resource.ResourceType;
 import org.apache.nifi.context.PropertyContext;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.hadoop.SecurityUtil;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
 import org.apache.nifi.kerberos.KerberosUserService;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.security.krb.KerberosKeytabUser;
-import org.apache.nifi.security.krb.KerberosPasswordUser;
 import org.apache.nifi.security.krb.KerberosUser;
 import org.ietf.jgss.GSSException;
 
 import javax.net.SocketFactory;
-import java.io.File;
 import java.io.IOException;
-import java.lang.reflect.Field;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.net.URI;
@@ -83,8 +78,6 @@ import java.util.stream.Stream;
  */
 @RequiresInstanceClassLoading(cloneAncestorResources = true)
 public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
-    private static final String ALLOW_EXPLICIT_KEYTAB = "NIFI_ALLOW_EXPLICIT_KEYTAB";
-
     private static final String DENY_LFS_ACCESS = "NIFI_HDFS_DENY_LOCAL_FILE_SYSTEM_ACCESS";
 
     private static final String DENY_LFS_EXPLANATION = String.format("LFS Access Denied according to Environment Variable [%s]", DENY_LFS_ACCESS);
@@ -146,14 +139,6 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
             .dynamicallyModifiesClasspath(true)
             .build();
 
-    public static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
-        .name("kerberos-credentials-service")
-        .displayName("Kerberos Credentials Service")
-        .description("Specifies the Kerberos Credentials Controller Service that should be used for authenticating with Kerberos")
-        .identifiesControllerService(KerberosCredentialsService.class)
-        .required(false)
-        .build();
-
     static final PropertyDescriptor KERBEROS_USER_SERVICE = new PropertyDescriptor.Builder()
         .name("kerberos-user-service")
         .displayName("Kerberos User Service")
@@ -171,9 +156,7 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
     private static final Object RESOURCES_LOCK = new Object();
     private static final HdfsResources EMPTY_HDFS_RESOURCES = new HdfsResources(null, null, null, null);
 
-    protected KerberosProperties kerberosProperties;
     protected List<PropertyDescriptor> properties;
-    private volatile File kerberosConfigFile = null;
 
     // variables shared by all threads of this processor
     // Hadoop Configuration, Filesystem, and UserGroupInformation (optional)
@@ -186,23 +169,20 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
     protected void init(ProcessorInitializationContext context) {
         hdfsResources.set(EMPTY_HDFS_RESOURCES);
 
-        kerberosConfigFile = context.getKerberosConfigurationFile();
-        kerberosProperties = getKerberosProperties(kerberosConfigFile);
-
         List<PropertyDescriptor> props = new ArrayList<>();
         props.add(HADOOP_CONFIGURATION_RESOURCES);
-        props.add(KERBEROS_CREDENTIALS_SERVICE);
         props.add(KERBEROS_USER_SERVICE);
-        props.add(kerberosProperties.getKerberosPrincipal());
-        props.add(kerberosProperties.getKerberosKeytab());
-        props.add(kerberosProperties.getKerberosPassword());
         props.add(KERBEROS_RELOGIN_PERIOD);
         props.add(ADDITIONAL_CLASSPATH_RESOURCES);
         properties = Collections.unmodifiableList(props);
     }
 
-    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-        return new KerberosProperties(kerberosConfigFile);
+    @Override
+    public void migrateProperties(final PropertyConfiguration config) {
+        config.removeProperty("Kerberos Principal");
+        config.removeProperty("Kerberos Password");
+        config.removeProperty("Kerberos Keytab");
+        config.removeProperty("kerberos-credentials-service");
     }
 
     @Override
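The migrateProperties hook added above runs when an existing flow is loaded, dropping the stored values of the removed Kerberos properties so older flows keep working without them. A hypothetical component retiring its own properties would follow the same pattern:

    import org.apache.nifi.migration.PropertyConfiguration;
    import org.apache.nifi.processor.AbstractProcessor;

    public abstract class MigrationSketch extends AbstractProcessor {
        @Override
        public void migrateProperties(final PropertyConfiguration config) {
            // Remove both the display-era names and the machine names used by older versions
            config.removeProperty("Kerberos Principal");
            config.removeProperty("kerberos-credentials-service");
        }
    }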
@@ -236,20 +216,7 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
         builder.add(context.getProperty(HADOOP_CONFIGURATION_RESOURCES).getValue());
         builder.add(context.getProperty(ADDITIONAL_CLASSPATH_RESOURCES).getValue());
 
-        final String explicitKerberosPrincipal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
-        if (explicitKerberosPrincipal != null) {
-            builder.add(explicitKerberosPrincipal);
-        }
-
         try {
-            final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-            if (credentialsService != null) {
-                final String credentialsServicePrincipal = credentialsService.getPrincipal();
-                if (credentialsServicePrincipal != null) {
-                    builder.add(credentialsServicePrincipal);
-                }
-            }
-
             final KerberosUserService kerberosUserService = context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
             if (kerberosUserService != null) {
                 final KerberosUser kerberosUser = kerberosUserService.createKerberosUser();
@@ -264,22 +231,6 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
 
     @Override
     protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
-        final String explicitPrincipal = validationContext.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
-        final String explicitKeytab = validationContext.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
-        final String explicitPassword = validationContext.getProperty(kerberosProperties.getKerberosPassword()).getValue();
-        final KerberosCredentialsService credentialsService = validationContext.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-        final KerberosUserService kerberosUserService = validationContext.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
-
-        final String resolvedPrincipal;
-        final String resolvedKeytab;
-        if (credentialsService == null) {
-            resolvedPrincipal = explicitPrincipal;
-            resolvedKeytab = explicitKeytab;
-        } else {
-            resolvedPrincipal = credentialsService.getPrincipal();
-            resolvedKeytab = credentialsService.getKeytab();
-        }
-
         final List<ValidationResult> results = new ArrayList<>();
         final List<String> locations = getConfigLocations(validationContext);
 
@@ -289,16 +240,6 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
 
         try {
             final Configuration conf = getHadoopConfigurationForValidation(locations);
-            if (kerberosUserService == null) {
-                results.addAll(KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                    this.getClass().getSimpleName(), conf, resolvedPrincipal, resolvedKeytab, explicitPassword, getLogger()));
-            } else {
-                final boolean securityEnabled = SecurityUtil.isSecurityEnabled(conf);
-                if (!securityEnabled) {
-                    getLogger().warn("Hadoop Configuration does not have security enabled, KerberosUserService will be ignored");
-                }
-            }
-
             results.addAll(validateFileSystem(conf));
         } catch (final IOException e) {
             results.add(new ValidationResult.Builder()
@@ -308,39 +249,6 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
                 .build());
         }
 
-        if (credentialsService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
-            results.add(new ValidationResult.Builder()
-                .subject("Kerberos Credentials")
-                .valid(false)
-                .explanation("Cannot specify a Kerberos Credentials Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
-                .build());
-        }
-
-        if (kerberosUserService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
-            results.add(new ValidationResult.Builder()
-                .subject("Kerberos User")
-                .valid(false)
-                .explanation("Cannot specify a Kerberos User Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
-                .build());
-        }
-
-        if (kerberosUserService != null && credentialsService != null) {
-            results.add(new ValidationResult.Builder()
-                .subject("Kerberos User")
-                .valid(false)
-                .explanation("Cannot specify a Kerberos User Service while also specifying a Kerberos Credentials Service")
-                .build());
-        }
-
-        if (!isAllowExplicitKeytab() && explicitKeytab != null) {
-            results.add(new ValidationResult.Builder()
-                .subject("Kerberos Credentials")
-                .valid(false)
-                .explanation("The '" + ALLOW_EXPLICIT_KEYTAB + "' system environment variable is configured to forbid explicitly configuring Kerberos Keytab in processors. "
-                    + "The Kerberos Credentials Service should be used instead of setting the Kerberos Keytab or Kerberos Principal property.")
-                .build());
-        }
-
         return results;
     }
 
@@ -413,29 +321,6 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
         hdfsResources.set(EMPTY_HDFS_RESOURCES);
     }
 
-    private void interruptStatisticsThread(final FileSystem fileSystem) throws NoSuchFieldException, IllegalAccessException {
-        final Field statsField = FileSystem.class.getDeclaredField("statistics");
-        statsField.setAccessible(true);
-
-        final Object statsObj = statsField.get(fileSystem);
-        if (statsObj instanceof FileSystem.Statistics) {
-            final FileSystem.Statistics statistics = (FileSystem.Statistics) statsObj;
-
-            final Field statsThreadField = statistics.getClass().getDeclaredField("STATS_DATA_CLEANER");
-            statsThreadField.setAccessible(true);
-
-            final Object statsThreadObj = statsThreadField.get(statistics);
-            if (statsThreadObj instanceof Thread) {
-                final Thread statsThread = (Thread) statsThreadObj;
-                try {
-                    statsThread.interrupt();
-                } catch (Exception e) {
-                    getLogger().warn("Error interrupting thread", e);
-                }
-            }
-        }
-    }
-
     private static Configuration getConfigurationFromResources(final Configuration config, final List<String> locations) throws IOException {
         boolean foundResources = !locations.isEmpty();
 
@@ -512,25 +397,6 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
         final KerberosUserService kerberosUserService = context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
         if (kerberosUserService != null) {
             return kerberosUserService.createKerberosUser();
-        }
-
-        // Kerberos User Service wasn't set, so create KerberosUser based on credentials service or explicit properties...
-        String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
-        String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
-        String password = context.getProperty(kerberosProperties.getKerberosPassword()).getValue();
-
-        // If the Kerberos Credentials Service is specified, we need to use its configuration, not the explicit properties for principal/keytab.
-        // The customValidate method ensures that only one can be set, so we know that the principal & keytab above are null.
-        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-        if (credentialsService != null) {
-            principal = credentialsService.getPrincipal();
-            keyTab = credentialsService.getKeytab();
-        }
-
-        if (keyTab != null) {
-            return new KerberosKeytabUser(principal, keyTab);
-        } else if (password != null) {
-            return new KerberosPasswordUser(principal, password);
         } else {
             throw new IllegalStateException("Unable to authenticate with Kerberos, no keytab or password was provided");
         }
@@ -653,13 +519,6 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor implements ClassloaderIsolationKeyProvider {
         return hdfsResources.get().getUserGroupInformation();
     }
 
-    /*
-     * Overridable by subclasses in the same package, mainly intended for testing purposes to allow verification without having to set environment variables.
-     */
-    boolean isAllowExplicitKeytab() {
-        return Boolean.parseBoolean(System.getenv(ALLOW_EXPLICIT_KEYTAB));
-    }
-
     boolean isLocalFileSystemAccessDenied() {
         return Boolean.parseBoolean(System.getenv(DENY_LFS_ACCESS));
     }
@@ -1,136 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hadoop;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.nifi.components.ValidationResult;
-import org.apache.nifi.logging.ComponentLog;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-
-import java.io.File;
-import java.util.List;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
-public class TestKerberosProperties {
-    @Test
-    public void testWithKerberosConfigFile() {
-        final File file = new File("src/test/resources/krb5.conf");
-
-        final KerberosProperties kerberosProperties = new KerberosProperties(file);
-        assertNotNull(kerberosProperties);
-
-        assertNotNull(kerberosProperties.getKerberosConfigFile());
-        assertNotNull(kerberosProperties.getKerberosConfigValidator());
-        assertNotNull(kerberosProperties.getKerberosPrincipal());
-        assertNotNull(kerberosProperties.getKerberosKeytab());
-
-        final ValidationResult result = kerberosProperties.getKerberosConfigValidator().validate("test", "principal", null);
-        assertTrue(result.isValid());
-    }
-
-    @Test
-    public void testWithoutKerberosConfigFile() {
-        final KerberosProperties kerberosProperties = new KerberosProperties(null);
-        assertNotNull(kerberosProperties);
-
-        assertNull(kerberosProperties.getKerberosConfigFile());
-        assertNotNull(kerberosProperties.getKerberosConfigValidator());
-        assertNotNull(kerberosProperties.getKerberosPrincipal());
-        assertNotNull(kerberosProperties.getKerberosKeytab());
-
-        final ValidationResult result = kerberosProperties.getKerberosConfigValidator().validate("test", "principal", null);
-        assertFalse(result.isValid());
-    }
-
-    @Test
-    public void testValidatePrincipalAndKeytab() {
-        final ComponentLog log = Mockito.mock(ComponentLog.class);
-        final Configuration config = new Configuration();
-
-        // no security enabled in config so doesn't matter what principal, keytab, and password are
-        List<ValidationResult> results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, null, null, null, log);
-        assertEquals(0, results.size());
-
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, "principal", null, null, log);
-        assertEquals(0, results.size());
-
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, "principal", "keytab", null, log);
-        assertEquals(0, results.size());
-
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, "principal", null, "password", log);
-        assertEquals(0, results.size());
-
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, "principal", "keytab", "password", log);
-        assertEquals(0, results.size());
-
-        // change the config to have kerberos turned on
-        config.set("hadoop.security.authentication", "kerberos");
-        config.set("hadoop.security.authorization", "true");
-
-        // security is enabled, no principal, keytab, or password provided
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, null, null, null, log);
-        assertEquals(2, results.size());
-
-        // security is enabled, keytab provided, no principal or password provided
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, null, "keytab", null, log);
-        assertEquals(1, results.size());
-
-        // security is enabled, password provided, no principal or keytab provided
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, null, null, "password", log);
-        assertEquals(1, results.size());
-
-        // security is enabled, no principal provided, keytab and password provided
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, null, "keytab", "password", log);
-        assertEquals(2, results.size());
-
-        // security is enabled, principal provided, no keytab or password provided
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, "principal", null, null, log);
-        assertEquals(1, results.size());
-
-        // security is enabled, principal and keytab provided, no password provided
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, "principal", "keytab", null, log);
-        assertEquals(0, results.size());
-
-        // security is enabled, no keytab provided, principal and password provided
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, "principal", null, "password", log);
-        assertEquals(0, results.size());
-
-        // security is enabled, principal, keytab, and password provided
-        results = KerberosProperties.validatePrincipalWithKeytabOrPassword(
-                "test", config, "principal", "keytab", "password", log);
-        assertEquals(1, results.size());
-    }
-
-}
@@ -1,12 +0,0 @@
-[libdefaults]
- default_realm = EXAMPLE.COM
-
-[realms]
- EXAMPLE.COM = {
-  kdc = kdc1.example.com
-  kdc = kdc2.example.com
-  admin_server = kdc1.example.com
- }
-
-[domain_realm]
- .example.com = EXAMPLE.COM
@@ -1,21 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License. You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<configuration>
-  <property>
-    <name>hadoop.security.authentication</name>
-    <value>kerberos</value>
-  </property>
-</configuration>
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License. You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-extension-utils</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>nifi-kerberos-test-utils</artifactId>
-
-    <dependencies>
-
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
-    </dependencies>
-</project>
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.kerberos;
-
-import org.apache.nifi.annotation.lifecycle.OnEnabled;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.resource.ResourceCardinality;
-import org.apache.nifi.components.resource.ResourceType;
-import org.apache.nifi.controller.AbstractControllerService;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.reporting.InitializationException;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class MockKerberosCredentialsService extends AbstractControllerService implements KerberosCredentialsService {
-
-    public static String DEFAULT_KEYTAB = "src/test/resources/fake.keytab";
-    public static String DEFAULT_PRINCIPAL = "test@REALM.COM";
-
-    private volatile String keytab = DEFAULT_KEYTAB;
-    private volatile String principal = DEFAULT_PRINCIPAL;
-
-    public static final PropertyDescriptor PRINCIPAL = new PropertyDescriptor.Builder()
-            .name("Kerberos Principal")
-            .description("Kerberos principal to authenticate as. Requires nifi.kerberos.krb5.file to be set in your nifi.properties")
-            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
-            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
-            .required(true)
-            .build();
-
-    public static final PropertyDescriptor KEYTAB = new PropertyDescriptor.Builder()
-            .name("Kerberos Keytab")
-            .description("Kerberos keytab associated with the principal. Requires nifi.kerberos.krb5.file to be set in your nifi.properties")
-            .identifiesExternalResource(ResourceCardinality.SINGLE, ResourceType.FILE)
-            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
-            .required(true)
-            .build();
-
-    public MockKerberosCredentialsService() {
-    }
-
-    @OnEnabled
-    public void onConfigured(final ConfigurationContext context) throws InitializationException {
-        keytab = context.getProperty(KEYTAB).getValue();
-        principal = context.getProperty(PRINCIPAL).getValue();
-    }
-
-    @Override
-    public String getKeytab() {
-        return keytab;
-    }
-
-    @Override
-    public String getPrincipal() {
-        return principal;
-    }
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        final List<PropertyDescriptor> properties = new ArrayList<>(2);
-        properties.add(KEYTAB);
-        properties.add(PRINCIPAL);
-        return properties;
-    }
-
-    @Override
-    public String getIdentifier() {
-        return "kcs";
-    }
-}
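Note: with MockKerberosCredentialsService gone, tests that still need a Kerberos identity can register a mocked KerberosUserService instead; the helper removed from AbstractHadoopTest further down shows exactly this pattern. A minimal sketch of it, assuming Mockito and the NiFi mock framework are on the test classpath (the "userService1" identifier is illustrative):

    import org.apache.nifi.kerberos.KerberosUserService;
    import org.apache.nifi.reporting.InitializationException;
    import org.apache.nifi.util.TestRunner;

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    final class KerberosTestSupport {

        // Registers and enables a mocked KerberosUserService so a component
        // referencing it can pass validation without a real KDC or keytab.
        static KerberosUserService enableKerberosUserService(final TestRunner runner) throws InitializationException {
            final KerberosUserService service = mock(KerberosUserService.class);
            when(service.getIdentifier()).thenReturn("userService1");
            runner.addControllerService(service.getIdentifier(), service);
            runner.enableControllerService(service);
            return service;
        }
    }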
@@ -37,7 +37,6 @@
         <module>nifi-event-transport</module>
         <module>nifi-file-transfer</module>
         <module>nifi-hadoop-utils</module>
-        <module>nifi-kerberos-test-utils</module>
         <module>nifi-listed-entity</module>
         <module>nifi-migration-utils</module>
         <module>nifi-put-pattern</module>
@@ -104,10 +104,6 @@
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-distributed-cache-client-service-api</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -24,22 +24,13 @@ import org.apache.nifi.components.resource.FileResourceReference;
 import org.apache.nifi.components.resource.ResourceReference;
 import org.apache.nifi.components.resource.ResourceReferences;
 import org.apache.nifi.components.resource.StandardResourceReferences;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
-import org.apache.nifi.kerberos.KerberosUserService;
 import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.MockProcessContext;
 import org.apache.nifi.util.MockValidationContext;
-import org.apache.nifi.util.NiFiProperties;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 import java.io.File;
 import java.io.IOException;
@@ -60,43 +51,17 @@ import static org.mockito.Mockito.when;

 public class AbstractHadoopTest {

-    private static Logger logger;
-
-    private File temporaryFile;
-    private KerberosProperties kerberosProperties;
-    private NiFiProperties mockedProperties;
-
-    @BeforeAll
-    public static void setUpClass() {
-        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
-        System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
-        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.hadoop", "debug");
-        logger = LoggerFactory.getLogger(AbstractHadoopTest.class);
-    }
-
     @BeforeEach
     public void setup() throws IOException {
         // needed for calls to UserGroupInformation.setConfiguration() to work when passing in
         // config with Kerberos authentication enabled
         System.setProperty("java.security.krb5.realm", "nifi.com");
         System.setProperty("java.security.krb5.kdc", "nifi.kdc");
-
-        temporaryFile = File.createTempFile("hadoop-test", ".properties");
-
-        // mock properties and return a temporary file for the kerberos configuration
-        mockedProperties = mock(NiFiProperties.class);
-        when(mockedProperties.getKerberosConfigurationFile()).thenReturn(temporaryFile);
-        kerberosProperties = new KerberosProperties(temporaryFile);
-    }
-
-    @AfterEach
-    public void cleanUp() {
-        temporaryFile.delete();
     }

     @Test
     public void testErrorConditions() {
-        SimpleHadoopProcessor processor = new SimpleHadoopProcessor(kerberosProperties);
+        SimpleHadoopProcessor processor = new SimpleHadoopProcessor(true);
         TestRunner runner = TestRunners.newTestRunner(processor);
         Collection<ValidationResult> results;
         ProcessContext pc;
@@ -122,7 +87,7 @@ public class AbstractHadoopTest {

     @Test
     public void testTimeoutDetection() {
-        SimpleHadoopProcessor processor = new SimpleHadoopProcessor(kerberosProperties);
+        SimpleHadoopProcessor processor = new SimpleHadoopProcessor(true);
         TestRunner runner = TestRunners.newTestRunner(processor);
         assertThrows(IOException.class, () -> {
             final File brokenCoreSite = new File("src/test/resources/core-site-broken.xml");
@@ -133,120 +98,9 @@ public class AbstractHadoopTest {
         });
     }

-    @Test
-    public void testKerberosOptions() {
-        SimpleHadoopProcessor processor = new SimpleHadoopProcessor(kerberosProperties);
-        TestRunner runner = TestRunners.newTestRunner(processor);
-        // should be valid since no kerberos options specified
-        runner.assertValid();
-        // no longer valid since only the principal is provided
-        runner.setProperty(AbstractHadoopProcessor.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site-security.xml");
-        runner.setProperty(kerberosProperties.getKerberosPrincipal(), "principal");
-        runner.assertNotValid();
-        // invalid since the keytab does not exist
-        runner.setProperty(kerberosProperties.getKerberosKeytab(), "BAD_KEYTAB_PATH");
-        runner.assertNotValid();
-        // valid since keytab is now a valid file location
-        runner.setProperty(kerberosProperties.getKerberosKeytab(), temporaryFile.getAbsolutePath());
-        runner.assertValid();
-    }
-
-    @Test
-    public void testKerberosOptionsWithEL() throws Exception {
-        SimpleHadoopProcessor processor = new SimpleHadoopProcessor(kerberosProperties);
-        TestRunner runner = TestRunners.newTestRunner(processor);
-
-        // initialize the runner with EL for the kerberos properties
-        runner.setProperty(AbstractHadoopProcessor.HADOOP_CONFIGURATION_RESOURCES, "${variableHadoopConfigResources}");
-        runner.setProperty(kerberosProperties.getKerberosPrincipal(), "${variablePrincipal}");
-        runner.setProperty(kerberosProperties.getKerberosKeytab(), "${variableKeytab}");
-
-        // add variables for all the kerberos properties except for the keytab
-        runner.setEnvironmentVariableValue("variableHadoopConfigResources", "src/test/resources/core-site-security.xml");
-        runner.setEnvironmentVariableValue("variablePrincipal", "principal");
-        // test that the config is not valid, since the EL for keytab will return nothing, no keytab
-        runner.assertNotValid();
-
-        // add variable for the keytab
-        runner.setEnvironmentVariableValue("variableKeytab", temporaryFile.getAbsolutePath());
-        // test that the config is valid
-        runner.assertValid();
-    }
-
-    @Test
-    public void testKerberosOptionsWithBadKerberosConfigFile() throws Exception {
-        // invalid since the kerberos configuration was changed to a non-existent file
-        kerberosProperties = new KerberosProperties(new File("BAD_KERBEROS_PATH"));
-
-        SimpleHadoopProcessor processor = new SimpleHadoopProcessor(kerberosProperties);
-        TestRunner runner = TestRunners.newTestRunner(processor);
-        runner.assertValid();
-
-        runner.setProperty(AbstractHadoopProcessor.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site-security.xml");
-        runner.setProperty(kerberosProperties.getKerberosPrincipal(), "principal");
-        runner.setProperty(kerberosProperties.getKerberosKeytab(), temporaryFile.getAbsolutePath());
-        runner.assertNotValid();
-    }
-
-    @Test
-    public void testCustomValidateWhenKerberosUserServiceProvided() throws InitializationException {
-        final TestRunner runner = createTestRunnerWithKerberosEnabled();
-        final KerberosUserService kerberosUserService = enableKerberosUserService(runner);
-        runner.setProperty(AbstractHadoopProcessor.KERBEROS_USER_SERVICE, kerberosUserService.getIdentifier());
-        runner.assertValid();
-    }
-
-    @Test
-    public void testCustomValidateWhenKerberosUserServiceAndKerberosCredentialsService() throws InitializationException {
-        final TestRunner runner = createTestRunnerWithKerberosEnabled();
-
-        final KerberosUserService kerberosUserService = enableKerberosUserService(runner);
-        runner.setProperty(AbstractHadoopProcessor.KERBEROS_USER_SERVICE, kerberosUserService.getIdentifier());
-        runner.assertValid();
-
-        final KerberosCredentialsService credentialsService = enabledKerberosCredentialsService(runner);
-        runner.setProperty(AbstractHadoopProcessor.KERBEROS_CREDENTIALS_SERVICE, credentialsService.getIdentifier());
-        runner.assertNotValid();
-
-        runner.removeProperty(AbstractHadoopProcessor.KERBEROS_USER_SERVICE);
-        runner.assertValid();
-    }
-
-    @Test
-    public void testCustomValidateWhenKerberosUserServiceAndPrincipalAndKeytab() throws InitializationException {
-        final TestRunner runner = createTestRunnerWithKerberosEnabled();
-
-        final KerberosUserService kerberosUserService = enableKerberosUserService(runner);
-        runner.setProperty(AbstractHadoopProcessor.KERBEROS_USER_SERVICE, kerberosUserService.getIdentifier());
-        runner.assertValid();
-
-        runner.setProperty(kerberosProperties.getKerberosPrincipal(), "principal1");
-        runner.setProperty(kerberosProperties.getKerberosKeytab(), temporaryFile.getAbsolutePath());
-        runner.assertNotValid();
-
-        runner.removeProperty(AbstractHadoopProcessor.KERBEROS_USER_SERVICE);
-        runner.assertValid();
-    }
-
-    @Test
-    public void testCustomValidateWhenKerberosUserServiceAndPrincipalAndPassword() throws InitializationException {
-        final TestRunner runner = createTestRunnerWithKerberosEnabled();
-
-        final KerberosUserService kerberosUserService = enableKerberosUserService(runner);
-        runner.setProperty(AbstractHadoopProcessor.KERBEROS_USER_SERVICE, kerberosUserService.getIdentifier());
-        runner.assertValid();
-
-        runner.setProperty(kerberosProperties.getKerberosPrincipal(), "principal1");
-        runner.setProperty(kerberosProperties.getKerberosPassword(), "password");
-        runner.assertNotValid();
-
-        runner.removeProperty(AbstractHadoopProcessor.KERBEROS_USER_SERVICE);
-        runner.assertValid();
-    }
-
     @Test
     public void testLocalFileSystemInvalid() {
-        final SimpleHadoopProcessor processor = new SimpleHadoopProcessor(kerberosProperties, true, true);
+        final SimpleHadoopProcessor processor = new SimpleHadoopProcessor(true);
         TestRunner runner = TestRunners.newTestRunner(processor);
         runner.setProperty(AbstractHadoopProcessor.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site.xml");

@@ -261,16 +115,6 @@ public class AbstractHadoopTest {
         assertFalse(result.isValid(), "Hadoop File System Valid");
     }

-    @Test
-    public void testDistributedFileSystemValid() {
-        final SimpleHadoopProcessor processor = new SimpleHadoopProcessor(kerberosProperties, true, true);
-        TestRunner runner = TestRunners.newTestRunner(processor);
-        runner.setProperty(AbstractHadoopProcessor.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site-security.xml");
-        runner.setProperty(kerberosProperties.getKerberosPrincipal(), "principal");
-        runner.setProperty(kerberosProperties.getKerberosKeytab(), temporaryFile.getAbsolutePath());
-        runner.assertValid();
-    }
-
     @Test
     public void testGetNormalizedPathWithoutFileSystem() throws URISyntaxException {
         AbstractHadoopProcessor processor = initProcessorForTestGetNormalizedPath("abfs://container1@storageaccount1");
@@ -330,50 +174,18 @@ public class AbstractHadoopTest {
         final FileSystem fileSystem = mock(FileSystem.class);
         when(fileSystem.getUri()).thenReturn(new URI(fileSystemUri));

-        final PutHDFS processor = new PutHDFS() {
+        return new PutHDFS() {
             @Override
             protected FileSystem getFileSystem() {
                 return fileSystem;
             }
         };
-
-        return processor;
     }

-    private TestRunner initTestRunnerForTestGetNormalizedPath(AbstractHadoopProcessor processor, String directory) throws URISyntaxException {
+    private TestRunner initTestRunnerForTestGetNormalizedPath(AbstractHadoopProcessor processor, String directory) {
         final TestRunner runner = TestRunners.newTestRunner(processor);
         runner.setProperty(AbstractHadoopProcessor.DIRECTORY, directory);

         return runner;
     }

-    private KerberosUserService enableKerberosUserService(final TestRunner runner) throws InitializationException {
-        final KerberosUserService kerberosUserService = mock(KerberosUserService.class);
-        when(kerberosUserService.getIdentifier()).thenReturn("userService1");
-        runner.addControllerService(kerberosUserService.getIdentifier(), kerberosUserService);
-        runner.enableControllerService(kerberosUserService);
-        return kerberosUserService;
-    }
-
-    private KerberosCredentialsService enabledKerberosCredentialsService(final TestRunner runner) throws InitializationException {
-        final KerberosCredentialsService credentialsService = mock(KerberosCredentialsService.class);
-        when(credentialsService.getIdentifier()).thenReturn("credsService1");
-        when(credentialsService.getPrincipal()).thenReturn("principal1");
-        when(credentialsService.getKeytab()).thenReturn("keytab1");
-
-        runner.addControllerService(credentialsService.getIdentifier(), credentialsService);
-        runner.enableControllerService(credentialsService);
-        return credentialsService;
-    }
-
-    private TestRunner createTestRunnerWithKerberosEnabled() {
-        final SimpleHadoopProcessor processor = new SimpleHadoopProcessor(kerberosProperties);
-        final TestRunner runner = TestRunners.newTestRunner(processor);
-        runner.assertValid();
-
-        runner.setProperty(AbstractHadoopProcessor.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site-security.xml");
-        runner.assertNotValid();
-        return runner;
-    }
-
 }
@@ -21,27 +21,20 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processors.hadoop.util.SequenceFileReader;
 import org.apache.nifi.util.MockComponentLog;
 import org.apache.nifi.util.MockProcessContext;
-import org.apache.nifi.util.NiFiProperties;
-import org.apache.nifi.util.TestRunner;
-import org.apache.nifi.util.TestRunners;
-import org.ietf.jgss.GSSException;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;

-import java.io.File;
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.List;

 import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
@@ -61,7 +54,7 @@ public class GetHDFSSequenceFileTest {
         fileSystem = mock(FileSystem.class);
         userGroupInformation = mock(UserGroupInformation.class);
         hdfsResourcesLocal = new HdfsResources(configuration, fileSystem, userGroupInformation, null);
-        getHDFSSequenceFile = new TestableGetHDFSSequenceFile(new KerberosProperties(null), userGroupInformation);
+        getHDFSSequenceFile = new TestableGetHDFSSequenceFile(userGroupInformation);
         reloginTried = false;
         init();
     }
@@ -79,7 +72,6 @@ public class GetHDFSSequenceFileTest {
     public void getFlowFilesWithUgiAndNewTicketShouldCallDoAsAndNotRelogin() throws Exception {
         SequenceFileReader reader = mock(SequenceFileReader.class);
         Path file = mock(Path.class);
-        getHDFSSequenceFile.kerberosProperties = mock(KerberosProperties.class);
         getHDFSSequenceFile.getFlowFiles(configuration, fileSystem, reader, file);
         ArgumentCaptor<PrivilegedExceptionAction> privilegedExceptionActionArgumentCaptor = ArgumentCaptor.forClass(PrivilegedExceptionAction.class);
         verifyNoMoreInteractions(reader);
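The hunk above keeps the doAs verification intact: the test captures the PrivilegedExceptionAction handed to UserGroupInformation.doAs and runs it itself. A hedged sketch of that capture-and-run idiom, with names matching the surrounding test (the exact doAs arity and stubbing are assumptions, not lifted from this change):

    // Capture the action the processor submitted to UserGroupInformation.doAs,
    // then invoke it directly so the mocked SequenceFileReader is exercised.
    final ArgumentCaptor<PrivilegedExceptionAction> captor =
            ArgumentCaptor.forClass(PrivilegedExceptionAction.class);
    verify(userGroupInformation).doAs(captor.capture());
    captor.getValue().run();
    verify(reader).readSequenceFile(file, configuration, fileSystem);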
@@ -91,7 +83,7 @@ public class GetHDFSSequenceFileTest {

     @Test
     public void testGetFlowFilesNoUgiShouldntCallDoAs() throws Exception {
-        getHDFSSequenceFile = new TestableGetHDFSSequenceFile(new KerberosProperties(null), null);
+        getHDFSSequenceFile = new TestableGetHDFSSequenceFile(null);
         hdfsResourcesLocal = new HdfsResources(configuration, fileSystem, null, null);
         init();
         SequenceFileReader reader = mock(SequenceFileReader.class);
@@ -100,44 +92,16 @@ public class GetHDFSSequenceFileTest {
         verify(reader).readSequenceFile(file, configuration, fileSystem);
     }

-    @Test
-    public void testGSSExceptionOnDoAs() throws Exception {
-        NiFiProperties mockNiFiProperties = mock(NiFiProperties.class);
-        when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
-        GetHDFSSequenceFile testSubject = new TestableGetHDFSSequenceFile(getHDFSSequenceFile.kerberosProperties, userGroupInformation, true);
-        TestRunner runner = TestRunners.newTestRunner(testSubject);
-        runner.setProperty(GetHDFSSequenceFile.DIRECTORY, "path/does/not/exist");
-        runner.run();
-        // assert no flowfiles transferred to outgoing relationships
-        runner.assertTransferCount(MoveHDFS.REL_SUCCESS, 0);
-        runner.assertTransferCount(MoveHDFS.REL_FAILURE, 0);
-    }
-
     public class TestableGetHDFSSequenceFile extends GetHDFSSequenceFile {

         UserGroupInformation userGroupInformation;
-        private KerberosProperties kerberosProperties;
-
-        public TestableGetHDFSSequenceFile(KerberosProperties kerberosProperties, UserGroupInformation ugi) throws IOException {
-            this(kerberosProperties, ugi, false);
+        public TestableGetHDFSSequenceFile(UserGroupInformation ugi) {
+            userGroupInformation = ugi;
         }

-        public TestableGetHDFSSequenceFile(KerberosProperties kerberosProperties, UserGroupInformation ugi, boolean failOnDoAs) throws IOException {
-            this.kerberosProperties = kerberosProperties;
-            this.userGroupInformation = ugi;
-            if (failOnDoAs && userGroupInformation != null) {
-                try {
-                    when(userGroupInformation.doAs(any(PrivilegedExceptionAction.class))).thenThrow(new IOException(new GSSException(13)));
-                } catch (InterruptedException e) {
-                    throw new IOException(e);
-                }
-            }
-        }
-
         @Override
-        HdfsResources resetHDFSResources(final List<String> resourceLocations, ProcessContext context) throws IOException {
+        HdfsResources resetHDFSResources(final List<String> resourceLocations, ProcessContext context) {
             return hdfsResourcesLocal;
         }

@@ -146,11 +110,6 @@ public class GetHDFSSequenceFileTest {
             abstractOnScheduled(context);
         }

-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return kerberosProperties;
-        }
-
         @Override
         protected UserGroupInformation getUserGroupInformation() {
             return userGroupInformation;
@@ -22,24 +22,20 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.provenance.ProvenanceEventRecord;
 import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.util.MockFlowFile;
 import org.apache.nifi.util.MockProcessContext;
-import org.apache.nifi.util.NiFiProperties;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
 import org.ietf.jgss.GSSException;
-import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.condition.DisabledOnOs;
 import org.junit.jupiter.api.condition.OS;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;

-import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.security.PrivilegedExceptionAction;
@@ -59,15 +55,6 @@ import static org.mockito.Mockito.when;
 @DisabledOnOs(OS.WINDOWS)
 public class GetHDFSTest {

-    private KerberosProperties kerberosProperties;
-
-    @BeforeEach
-    public void setup() {
-        NiFiProperties mockNiFiProperties = mock(NiFiProperties.class);
-        when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
-        kerberosProperties = new KerberosProperties(null);
-    }
-
     @Test
     public void getPathDifferenceTest() {
         assertEquals("", GetHDFS.getPathDifference(new Path("/root"), new Path("/file")));
@@ -99,7 +86,7 @@ public class GetHDFSTest {

     @Test
     public void testValidators() {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         Collection<ValidationResult> results;
         ProcessContext pc;
@@ -141,7 +128,7 @@ public class GetHDFSTest {

     @Test
     public void testGetFilesWithFilter() {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "src/test/resources/testdata");
         runner.setProperty(GetHDFS.FILE_FILTER_REGEX, "random.*");
@@ -156,7 +143,7 @@ public class GetHDFSTest {

     @Test
     public void testDirectoryDoesNotExist() {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "does/not/exist/${now():format('yyyyMMdd')}");
         runner.setProperty(GetHDFS.KEEP_SOURCE_FILE, "true");
@@ -167,7 +154,7 @@ public class GetHDFSTest {

     @Test
     public void testAutomaticDecompression() throws IOException {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "src/test/resources/testdata");
         runner.setProperty(GetHDFS.FILE_FILTER_REGEX, "random.*.gz");
@@ -186,7 +173,7 @@ public class GetHDFSTest {

     @Test
     public void testInferCompressionCodecDisabled() throws IOException {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "src/test/resources/testdata");
         runner.setProperty(GetHDFS.FILE_FILTER_REGEX, "random.*.gz");
@@ -205,7 +192,7 @@ public class GetHDFSTest {

     @Test
     public void testFileExtensionNotACompressionCodec() throws IOException {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "src/test/resources/testdata");
         runner.setProperty(GetHDFS.FILE_FILTER_REGEX, ".*.zip");
@@ -224,7 +211,7 @@ public class GetHDFSTest {

     @Test
     public void testDirectoryUsesValidEL() throws IOException {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "src/test/resources/${literal('testdata'):substring(0,8)}");
         runner.setProperty(GetHDFS.FILE_FILTER_REGEX, ".*.zip");
@@ -248,8 +235,8 @@ public class GetHDFSTest {
     }

     @Test
-    public void testDirectoryUsesUnrecognizedEL() throws IOException {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+    public void testDirectoryUsesUnrecognizedEL() {
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "data_${literal('testing'):substring(0,4)%7D");
         runner.setProperty(GetHDFS.FILE_FILTER_REGEX, ".*.zip");
@@ -259,8 +246,8 @@ public class GetHDFSTest {
     }

     @Test
-    public void testDirectoryUsesInvalidEL() throws IOException {
-        GetHDFS proc = new TestableGetHDFS(kerberosProperties);
+    public void testDirectoryUsesInvalidEL() {
+        GetHDFS proc = new GetHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "data_${literal('testing'):foo()}");
         runner.setProperty(GetHDFS.FILE_FILTER_REGEX, ".*.zip");
@@ -292,7 +279,7 @@ public class GetHDFSTest {
         FileSystem mockFileSystem = mock(FileSystem.class);
         UserGroupInformation mockUserGroupInformation = mock(UserGroupInformation.class);

-        GetHDFS testSubject = new TestableGetHDFSForUGI(kerberosProperties, mockFileSystem, mockUserGroupInformation);
+        GetHDFS testSubject = new TestableGetHDFSForUGI(mockFileSystem, mockUserGroupInformation);
         TestRunner runner = TestRunners.newTestRunner(testSubject);
         runner.setProperty(GetHDFS.DIRECTORY, "src/test/resources/testdata");

@@ -330,7 +317,7 @@ public class GetHDFSTest {
         FileSystem mockFileSystem = mock(FileSystem.class);
         UserGroupInformation mockUserGroupInformation = mock(UserGroupInformation.class);

-        GetHDFS testSubject = new TestableGetHDFSForUGI(kerberosProperties, mockFileSystem, mockUserGroupInformation);
+        GetHDFS testSubject = new TestableGetHDFSForUGI(mockFileSystem, mockUserGroupInformation);
         TestRunner runner = TestRunners.newTestRunner(testSubject);
         runner.setProperty(GetHDFS.DIRECTORY, "src/test/resources/testdata");
         when(mockUserGroupInformation.doAs(any(PrivilegedExceptionAction.class))).thenThrow(new IOException(new GSSException(13)));
@@ -342,26 +329,11 @@ public class GetHDFSTest {
         runner.assertPenalizeCount(0);
     }

-    private static class TestableGetHDFS extends GetHDFS {
-
-        private final KerberosProperties testKerberosProperties;
-
-        public TestableGetHDFS(KerberosProperties testKerberosProperties) {
-            this.testKerberosProperties = testKerberosProperties;
-        }
-
-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return testKerberosProperties;
-        }
-    }
-
-    private static class TestableGetHDFSForUGI extends TestableGetHDFS {
+    private static class TestableGetHDFSForUGI extends GetHDFS {
         private final FileSystem mockFileSystem;
         private final UserGroupInformation mockUserGroupInformation;

-        public TestableGetHDFSForUGI(KerberosProperties testKerberosProperties, FileSystem mockFileSystem, UserGroupInformation mockUserGroupInformation) {
-            super(testKerberosProperties);
+        public TestableGetHDFSForUGI(FileSystem mockFileSystem, UserGroupInformation mockUserGroupInformation) {
             this.mockFileSystem = mockFileSystem;
             this.mockUserGroupInformation = mockUserGroupInformation;
         }
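Aside: the retained failure-path tests above stub doAs with new IOException(new GSSException(13)). The magic number 13 corresponds to GSSException.NO_CRED, so an equivalent, more self-documenting stub would be (a sketch only, not part of this change):

    when(mockUserGroupInformation.doAs(any(PrivilegedExceptionAction.class)))
            .thenThrow(new IOException(new GSSException(GSSException.NO_CRED)));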
@@ -17,27 +17,24 @@
 package org.apache.nifi.processors.hadoop;

 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processors.hadoop.util.MockFileSystem;
 import org.apache.nifi.util.MockFlowFile;
 import org.apache.nifi.util.MockProcessContext;
-import org.apache.nifi.util.NiFiProperties;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
 import org.ietf.jgss.GSSException;
 import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.DisabledOnOs;
+import org.junit.jupiter.api.condition.OS;

 import javax.security.sasl.SaslException;
 import java.io.File;
@@ -55,28 +52,13 @@ import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;

+@DisabledOnOs(OS.WINDOWS)
 public class MoveHDFSTest {

     private static final String OUTPUT_DIRECTORY = "target/test-data-output";
     private static final String TEST_DATA_DIRECTORY = "src/test/resources/testdata";
     private static final String INPUT_DIRECTORY = "target/test-data-input";
-    private KerberosProperties kerberosProperties;
-
-    @BeforeAll
-    public static void setUpSuite() {
-        assumeTrue(!SystemUtils.IS_OS_WINDOWS, "Test only runs on *nix");
-    }
-
-    @BeforeEach
-    public void setup() {
-        NiFiProperties mockNiFiProperties = mock(NiFiProperties.class);
-        when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
-        kerberosProperties = new KerberosProperties(null);
-    }

     @AfterEach
     public void teardown() {
@@ -92,7 +74,7 @@ public class MoveHDFSTest {

     @Test
     public void testOutputDirectoryValidator() {
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         Collection<ValidationResult> results;
         ProcessContext pc;
@@ -112,7 +94,7 @@ public class MoveHDFSTest {

     @Test
     public void testBothInputAndOutputDirectoriesAreValid() {
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         Collection<ValidationResult> results;
         ProcessContext pc;
@@ -131,7 +113,7 @@ public class MoveHDFSTest {
     @Test
     public void testOnScheduledShouldRunCleanly() throws IOException {
         FileUtils.copyDirectory(new File(TEST_DATA_DIRECTORY), new File(INPUT_DIRECTORY));
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY);
         runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, OUTPUT_DIRECTORY);
@@ -145,7 +127,7 @@ public class MoveHDFSTest {
     @Test
     public void testDotFileFilterIgnore() throws IOException {
         FileUtils.copyDirectory(new File(TEST_DATA_DIRECTORY), new File(INPUT_DIRECTORY));
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY);
         runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, OUTPUT_DIRECTORY);
@@ -161,7 +143,7 @@ public class MoveHDFSTest {
     @Test
     public void testDotFileFilterInclude() throws IOException {
         FileUtils.copyDirectory(new File(TEST_DATA_DIRECTORY), new File(INPUT_DIRECTORY));
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY);
         runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, OUTPUT_DIRECTORY);
@@ -176,7 +158,7 @@ public class MoveHDFSTest {
     @Test
     public void testFileFilterRegex() throws IOException {
         FileUtils.copyDirectory(new File(TEST_DATA_DIRECTORY), new File(INPUT_DIRECTORY));
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY);
         runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, OUTPUT_DIRECTORY);
@@ -191,7 +173,7 @@ public class MoveHDFSTest {
     @Test
     public void testSingleFileAsInputCopy() throws IOException {
         FileUtils.copyDirectory(new File(TEST_DATA_DIRECTORY), new File(INPUT_DIRECTORY));
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY + "/randombytes-1");
         runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, OUTPUT_DIRECTORY);
@@ -208,7 +190,7 @@ public class MoveHDFSTest {
     @Test
     public void testSingleFileAsInputMove() throws IOException {
         FileUtils.copyDirectory(new File(TEST_DATA_DIRECTORY), new File(INPUT_DIRECTORY));
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY + "/randombytes-1");
         runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, OUTPUT_DIRECTORY);
@@ -226,7 +208,7 @@ public class MoveHDFSTest {
         FileUtils.copyDirectory(new File(TEST_DATA_DIRECTORY), new File(INPUT_DIRECTORY));
         File subdir = new File(INPUT_DIRECTORY, "subdir");
         FileUtils.copyDirectory(new File(TEST_DATA_DIRECTORY), subdir);
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY);
         runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, OUTPUT_DIRECTORY);
@@ -241,7 +223,7 @@ public class MoveHDFSTest {

     @Test
     public void testEmptyInputDirectory() throws IOException {
-        MoveHDFS proc = new TestableMoveHDFS(kerberosProperties);
+        MoveHDFS proc = new TestableMoveHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         Files.createDirectories(Paths.get(INPUT_DIRECTORY));
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, INPUT_DIRECTORY);
@@ -263,7 +245,7 @@ public class MoveHDFSTest {
             }
         };
         noCredentialsFileSystem.setFailOnExists(true);
-        TestRunner runner = TestRunners.newTestRunner(new TestableMoveHDFS(kerberosProperties, noCredentialsFileSystem));
+        TestRunner runner = TestRunners.newTestRunner(new TestableMoveHDFS(noCredentialsFileSystem));
         runner.setProperty(MoveHDFS.INPUT_DIRECTORY_OR_FILE, "input/does/not/exist");
         runner.setProperty(MoveHDFS.OUTPUT_DIRECTORY, "target/test-classes");
         runner.setProperty(MoveHDFS.CONFLICT_RESOLUTION, "replace");
@@ -333,23 +315,16 @@ public class MoveHDFSTest {

     private static class TestableMoveHDFS extends MoveHDFS {

-        private final KerberosProperties testKerberosProperties;
         private final FileSystem fileSystem;

-        public TestableMoveHDFS(KerberosProperties testKerberosProperties) {
-            this(testKerberosProperties, null);
+        public TestableMoveHDFS() {
+            this(null);
         }

-        public TestableMoveHDFS(KerberosProperties testKerberosProperties, FileSystem fileSystem) {
-            this.testKerberosProperties = testKerberosProperties;
+        public TestableMoveHDFS(final FileSystem fileSystem) {
             this.fileSystem = fileSystem;
         }

-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return testKerberosProperties;
-        }
-
         @Override
         protected FileSystem getFileSystem(Configuration config) throws IOException {
             return fileSystem == null ? super.getFileSystem(config) : fileSystem;
@@ -30,7 +30,6 @@ import org.apache.nifi.fileresource.service.api.FileResourceService;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.processors.transfer.ResourceTransferProperties;
 import org.apache.nifi.processors.transfer.ResourceTransferSource;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
@ -83,18 +82,16 @@ public class PutHDFSTest {
|
|||||||
private final static String FILE_NAME = "randombytes-1";
|
private final static String FILE_NAME = "randombytes-1";
|
||||||
private final static String AVRO_FILE_NAME = "input.avro";
|
private final static String AVRO_FILE_NAME = "input.avro";
|
||||||
|
|
||||||
private KerberosProperties kerberosProperties;
|
|
||||||
private MockFileSystem mockFileSystem;
|
private MockFileSystem mockFileSystem;
|
||||||
|
|
||||||
@BeforeEach
|
@BeforeEach
|
||||||
public void setup() {
|
public void setup() {
|
||||||
kerberosProperties = new KerberosProperties(null);
|
|
||||||
mockFileSystem = new MockFileSystem();
|
mockFileSystem = new MockFileSystem();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testValidators() {
|
public void testValidators() {
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
PutHDFS proc = new TestablePutHDFS(mockFileSystem);
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
Collection<ValidationResult> results;
|
Collection<ValidationResult> results;
|
||||||
ProcessContext pc;
|
ProcessContext pc;
|
||||||
@ -132,7 +129,7 @@ public class PutHDFSTest {
|
|||||||
assertTrue(vr.toString().contains("is invalid because short integer must be greater than zero"));
|
assertTrue(vr.toString().contains("is invalid because short integer must be greater than zero"));
|
||||||
}
|
}
|
||||||
|
|
||||||
proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
proc = new TestablePutHDFS(mockFileSystem);
|
||||||
runner = TestRunners.newTestRunner(proc);
|
runner = TestRunners.newTestRunner(proc);
|
||||||
results = new HashSet<>();
|
results = new HashSet<>();
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "/target");
|
runner.setProperty(PutHDFS.DIRECTORY, "/target");
|
||||||
@ -147,7 +144,7 @@ public class PutHDFSTest {
|
|||||||
assertTrue(vr.toString().contains("is invalid because short integer must be greater than zero"));
|
assertTrue(vr.toString().contains("is invalid because short integer must be greater than zero"));
|
||||||
}
|
}
|
||||||
|
|
||||||
proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
proc = new TestablePutHDFS(mockFileSystem);
|
||||||
runner = TestRunners.newTestRunner(proc);
|
runner = TestRunners.newTestRunner(proc);
|
||||||
results = new HashSet<>();
|
results = new HashSet<>();
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "/target");
|
runner.setProperty(PutHDFS.DIRECTORY, "/target");
|
||||||
@ -162,7 +159,7 @@ public class PutHDFSTest {
|
|||||||
assertTrue(vr.toString().contains("is invalid because octal umask [-1] cannot be negative"));
|
assertTrue(vr.toString().contains("is invalid because octal umask [-1] cannot be negative"));
|
||||||
}
|
}
|
||||||
|
|
||||||
proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
proc = new TestablePutHDFS(mockFileSystem);
|
||||||
runner = TestRunners.newTestRunner(proc);
|
runner = TestRunners.newTestRunner(proc);
|
||||||
results = new HashSet<>();
|
results = new HashSet<>();
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "/target");
|
runner.setProperty(PutHDFS.DIRECTORY, "/target");
|
||||||
@ -191,7 +188,7 @@ public class PutHDFSTest {
|
|||||||
}
|
}
|
||||||
|
|
||||||
results = new HashSet<>();
|
results = new HashSet<>();
|
||||||
proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
proc = new TestablePutHDFS(mockFileSystem);
|
||||||
runner = TestRunners.newTestRunner(proc);
|
runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "/target");
|
runner.setProperty(PutHDFS.DIRECTORY, "/target");
|
||||||
runner.setProperty(PutHDFS.COMPRESSION_CODEC, CompressionCodec.class.getName());
|
runner.setProperty(PutHDFS.COMPRESSION_CODEC, CompressionCodec.class.getName());
|
||||||
@ -238,7 +235,7 @@ public class PutHDFSTest {
|
|||||||
public void testPutFile() throws IOException {
|
public void testPutFile() throws IOException {
|
||||||
// given
|
// given
|
||||||
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
||||||
final PutHDFS proc = new TestablePutHDFS(kerberosProperties, spyFileSystem);
|
final PutHDFS proc = new TestablePutHDFS(spyFileSystem);
|
||||||
final TestRunner runner = TestRunners.newTestRunner(proc);
|
final TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, TARGET_DIRECTORY);
|
runner.setProperty(PutHDFS.DIRECTORY, TARGET_DIRECTORY);
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, PutHDFS.REPLACE_RESOLUTION);
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, PutHDFS.REPLACE_RESOLUTION);
|
||||||
@ -279,7 +276,7 @@ public class PutHDFSTest {
|
|||||||
public void testPutFileWithAppendAvroModeNewFileCreated() throws IOException {
|
public void testPutFileWithAppendAvroModeNewFileCreated() throws IOException {
|
||||||
// given
|
// given
|
||||||
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
||||||
final PutHDFS proc = new TestablePutHDFS(kerberosProperties, spyFileSystem);
|
final PutHDFS proc = new TestablePutHDFS(spyFileSystem);
|
||||||
final TestRunner runner = TestRunners.newTestRunner(proc);
|
final TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, AVRO_TARGET_DIRECTORY);
|
runner.setProperty(PutHDFS.DIRECTORY, AVRO_TARGET_DIRECTORY);
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, APPEND_RESOLUTION);
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, APPEND_RESOLUTION);
|
||||||
@ -304,7 +301,7 @@ public class PutHDFSTest {
|
|||||||
public void testPutFileWithAppendAvroModeWhenTargetFileAlreadyExists() throws IOException {
|
public void testPutFileWithAppendAvroModeWhenTargetFileAlreadyExists() throws IOException {
|
||||||
// given
|
// given
|
||||||
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
||||||
final PutHDFS proc = new TestablePutHDFS(kerberosProperties, spyFileSystem);
|
final PutHDFS proc = new TestablePutHDFS(spyFileSystem);
|
||||||
final TestRunner runner = TestRunners.newTestRunner(proc);
|
final TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, AVRO_TARGET_DIRECTORY);
|
runner.setProperty(PutHDFS.DIRECTORY, AVRO_TARGET_DIRECTORY);
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, APPEND_RESOLUTION);
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, APPEND_RESOLUTION);
|
||||||
@ -331,7 +328,7 @@ public class PutHDFSTest {
|
|||||||
public void testPutFileWithSimpleWrite() throws IOException {
|
public void testPutFileWithSimpleWrite() throws IOException {
|
||||||
// given
|
// given
|
||||||
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
||||||
final PutHDFS proc = new TestablePutHDFS(kerberosProperties, spyFileSystem);
|
final PutHDFS proc = new TestablePutHDFS(spyFileSystem);
|
||||||
final TestRunner runner = TestRunners.newTestRunner(proc);
|
final TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, TARGET_DIRECTORY);
|
runner.setProperty(PutHDFS.DIRECTORY, TARGET_DIRECTORY);
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, PutHDFS.REPLACE_RESOLUTION);
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, PutHDFS.REPLACE_RESOLUTION);
|
||||||
@ -366,7 +363,7 @@ public class PutHDFSTest {
|
|||||||
@Test
|
@Test
|
||||||
public void testPutFileWhenTargetDirExists() throws IOException {
|
public void testPutFileWhenTargetDirExists() throws IOException {
|
||||||
String targetDir = "target/test-classes";
|
String targetDir = "target/test-classes";
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
PutHDFS proc = new TestablePutHDFS(mockFileSystem);
|
||||||
proc.getFileSystem().mkdirs(new Path(targetDir));
|
proc.getFileSystem().mkdirs(new Path(targetDir));
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, targetDir);
|
runner.setProperty(PutHDFS.DIRECTORY, targetDir);
|
||||||
@ -401,7 +398,7 @@ public class PutHDFSTest {
|
|||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testPutFileWithCompression() throws IOException {
|
public void testPutFileWithCompression() throws IOException {
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
PutHDFS proc = new TestablePutHDFS(mockFileSystem);
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
||||||
@ -434,7 +431,7 @@ public class PutHDFSTest {
|
|||||||
throw new IOException("ioe", new SaslException("sasle", new GSSException(13)));
|
throw new IOException("ioe", new SaslException("sasle", new GSSException(13)));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(kerberosProperties, noCredentialsFileSystem));
|
TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(noCredentialsFileSystem));
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
||||||
|
|
||||||
@ -457,7 +454,7 @@ public class PutHDFSTest {
|
|||||||
file.mkdirs();
|
file.mkdirs();
|
||||||
Path p = new Path(dirName).makeQualified(mockFileSystem.getUri(), mockFileSystem.getWorkingDirectory());
|
Path p = new Path(dirName).makeQualified(mockFileSystem.getUri(), mockFileSystem.getWorkingDirectory());
|
||||||
|
|
||||||
TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(kerberosProperties, mockFileSystem) {
|
TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(mockFileSystem) {
|
||||||
@Override
|
@Override
|
||||||
protected void changeOwner(ProcessContext context, FileSystem hdfs, Path name, FlowFile flowFile) {
|
protected void changeOwner(ProcessContext context, FileSystem hdfs, Path name, FlowFile flowFile) {
|
||||||
throw new ProcessException("Forcing Exception to get thrown in order to verify proper handling");
|
throw new ProcessException("Forcing Exception to get thrown in order to verify proper handling");
|
||||||
@ -483,7 +480,7 @@ public class PutHDFSTest {
|
|||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testPutFileWhenDirectoryUsesValidELFunction() throws IOException {
|
public void testPutFileWhenDirectoryUsesValidELFunction() throws IOException {
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
PutHDFS proc = new TestablePutHDFS(mockFileSystem);
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "target/data_${literal('testing'):substring(0,4)}");
|
runner.setProperty(PutHDFS.DIRECTORY, "target/data_${literal('testing'):substring(0,4)}");
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
||||||
@ -509,7 +506,7 @@ public class PutHDFSTest {
|
|||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testPutFileWhenDirectoryUsesUnrecognizedEL() throws IOException {
|
public void testPutFileWhenDirectoryUsesUnrecognizedEL() throws IOException {
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
PutHDFS proc = new TestablePutHDFS(mockFileSystem);
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
|
|
||||||
// this value somehow causes NiFi to not even recognize the EL, and thus it returns successfully from calling
|
// this value somehow causes NiFi to not even recognize the EL, and thus it returns successfully from calling
|
||||||
@ -529,7 +526,7 @@ public class PutHDFSTest {
|
|||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testPutFileWhenDirectoryUsesInvalidEL() {
|
public void testPutFileWhenDirectoryUsesInvalidEL() {
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
|
PutHDFS proc = new TestablePutHDFS(mockFileSystem);
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
// the validator should pick up the invalid EL
|
// the validator should pick up the invalid EL
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "target/data_${literal('testing'):foo()}");
|
runner.setProperty(PutHDFS.DIRECTORY, "target/data_${literal('testing'):foo()}");
|
||||||
@ -541,7 +538,7 @@ public class PutHDFSTest {
|
|||||||
public void testPutFilePermissionsWithProcessorConfiguredUmask() throws IOException {
|
public void testPutFilePermissionsWithProcessorConfiguredUmask() throws IOException {
|
||||||
// assert the file permission is the same value as processor's property
|
// assert the file permission is the same value as processor's property
|
||||||
MockFileSystem fileSystem = new MockFileSystem();
|
MockFileSystem fileSystem = new MockFileSystem();
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, fileSystem);
|
PutHDFS proc = new TestablePutHDFS(fileSystem);
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
||||||
@ -563,7 +560,7 @@ public class PutHDFSTest {
|
|||||||
public void testPutFilePermissionsWithXmlConfiguredUmask() throws IOException {
|
public void testPutFilePermissionsWithXmlConfiguredUmask() throws IOException {
|
||||||
// assert the file permission is the same value as xml
|
// assert the file permission is the same value as xml
|
||||||
MockFileSystem fileSystem = new MockFileSystem();
|
MockFileSystem fileSystem = new MockFileSystem();
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, fileSystem);
|
PutHDFS proc = new TestablePutHDFS(fileSystem);
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
||||||
@ -584,7 +581,7 @@ public class PutHDFSTest {
|
|||||||
public void testPutFilePermissionsWithNoConfiguredUmask() throws IOException {
|
public void testPutFilePermissionsWithNoConfiguredUmask() throws IOException {
|
||||||
// assert the file permission fallback works. It should read FsPermission.DEFAULT_UMASK
|
// assert the file permission fallback works. It should read FsPermission.DEFAULT_UMASK
|
||||||
MockFileSystem fileSystem = new MockFileSystem();
|
MockFileSystem fileSystem = new MockFileSystem();
|
||||||
PutHDFS proc = new TestablePutHDFS(kerberosProperties, fileSystem);
|
PutHDFS proc = new TestablePutHDFS(fileSystem);
|
||||||
TestRunner runner = TestRunners.newTestRunner(proc);
|
TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
||||||
@ -614,7 +611,7 @@ public class PutHDFSTest {
|
|||||||
final String aclDefault = "default:user::rwx,default:group::rwx,default:other::rwx";
|
final String aclDefault = "default:user::rwx,default:group::rwx,default:other::rwx";
|
||||||
fileSystem.setAcl(directory, AclEntry.parseAclSpec(String.join(",", acl, aclDefault), true));
|
fileSystem.setAcl(directory, AclEntry.parseAclSpec(String.join(",", acl, aclDefault), true));
|
||||||
|
|
||||||
final PutHDFS processor = new TestablePutHDFS(kerberosProperties, fileSystem);
|
final PutHDFS processor = new TestablePutHDFS(fileSystem);
|
||||||
final TestRunner runner = TestRunners.newTestRunner(processor);
|
final TestRunner runner = TestRunners.newTestRunner(processor);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, directory.toString());
|
runner.setProperty(PutHDFS.DIRECTORY, directory.toString());
|
||||||
runner.setProperty(PutHDFS.UMASK, "077");
|
runner.setProperty(PutHDFS.UMASK, "077");
|
||||||
@ -639,7 +636,7 @@ public class PutHDFSTest {
|
|||||||
final String acl = "user::rwx,group::rwx,other::rwx";
|
final String acl = "user::rwx,group::rwx,other::rwx";
|
||||||
fileSystem.setAcl(directory, AclEntry.parseAclSpec(acl, true));
|
fileSystem.setAcl(directory, AclEntry.parseAclSpec(acl, true));
|
||||||
|
|
||||||
final PutHDFS processor = new TestablePutHDFS(kerberosProperties, fileSystem);
|
final PutHDFS processor = new TestablePutHDFS(fileSystem);
|
||||||
final TestRunner runner = TestRunners.newTestRunner(processor);
|
final TestRunner runner = TestRunners.newTestRunner(processor);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, directory.toString());
|
runner.setProperty(PutHDFS.DIRECTORY, directory.toString());
|
||||||
if (setUmaskIt) {
|
if (setUmaskIt) {
|
||||||
@ -669,7 +666,7 @@ public class PutHDFSTest {
|
|||||||
final String aclDefault = "default:user::rwx,default:group::rwx,default:other::rwx";
|
final String aclDefault = "default:user::rwx,default:group::rwx,default:other::rwx";
|
||||||
fileSystem.setAcl(directory, AclEntry.parseAclSpec(String.join(",", acl, aclDefault), true));
|
fileSystem.setAcl(directory, AclEntry.parseAclSpec(String.join(",", acl, aclDefault), true));
|
||||||
|
|
||||||
final PutHDFS processor = new TestablePutHDFS(kerberosProperties, fileSystem);
|
final PutHDFS processor = new TestablePutHDFS(fileSystem);
|
||||||
final TestRunner runner = TestRunners.newTestRunner(processor);
|
final TestRunner runner = TestRunners.newTestRunner(processor);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, directory.toString());
|
runner.setProperty(PutHDFS.DIRECTORY, directory.toString());
|
||||||
if (setUmaskIt) {
|
if (setUmaskIt) {
|
||||||
@ -693,7 +690,7 @@ public class PutHDFSTest {
|
|||||||
file.mkdirs();
|
file.mkdirs();
|
||||||
Path p = new Path(dirName).makeQualified(mockFileSystem.getUri(), mockFileSystem.getWorkingDirectory());
|
Path p = new Path(dirName).makeQualified(mockFileSystem.getUri(), mockFileSystem.getWorkingDirectory());
|
||||||
|
|
||||||
TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(kerberosProperties, mockFileSystem));
|
TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(mockFileSystem));
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, dirName);
|
runner.setProperty(PutHDFS.DIRECTORY, dirName);
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
||||||
|
|
||||||
@ -715,7 +712,7 @@ public class PutHDFSTest {
|
|||||||
@Test
|
@Test
|
||||||
public void testPutFileFromLocalFile() throws Exception {
|
public void testPutFileFromLocalFile() throws Exception {
|
||||||
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
final FileSystem spyFileSystem = Mockito.spy(mockFileSystem);
|
||||||
final PutHDFS proc = new TestablePutHDFS(kerberosProperties, spyFileSystem);
|
final PutHDFS proc = new TestablePutHDFS(spyFileSystem);
|
||||||
final TestRunner runner = TestRunners.newTestRunner(proc);
|
final TestRunner runner = TestRunners.newTestRunner(proc);
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, TARGET_DIRECTORY);
|
runner.setProperty(PutHDFS.DIRECTORY, TARGET_DIRECTORY);
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, PutHDFS.REPLACE_RESOLUTION);
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, PutHDFS.REPLACE_RESOLUTION);
|
||||||
@ -776,7 +773,7 @@ public class PutHDFSTest {
|
|||||||
file.mkdirs();
|
file.mkdirs();
|
||||||
Path p = new Path(dirName).makeQualified(mockFileSystem.getUri(), mockFileSystem.getWorkingDirectory());
|
Path p = new Path(dirName).makeQualified(mockFileSystem.getUri(), mockFileSystem.getWorkingDirectory());
|
||||||
|
|
||||||
TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(kerberosProperties, mockFileSystem));
|
TestRunner runner = TestRunners.newTestRunner(new TestablePutHDFS(mockFileSystem));
|
||||||
runner.setProperty(PutHDFS.DIRECTORY, dirName);
|
runner.setProperty(PutHDFS.DIRECTORY, dirName);
|
||||||
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
|
||||||
|
|
||||||
@ -819,19 +816,12 @@ public class PutHDFSTest {
|
|||||||
|
|
||||||
private static class TestablePutHDFS extends PutHDFS {
|
private static class TestablePutHDFS extends PutHDFS {
|
||||||
|
|
||||||
private final KerberosProperties testKerberosProperties;
|
|
||||||
private final FileSystem fileSystem;
|
private final FileSystem fileSystem;
|
||||||
|
|
||||||
TestablePutHDFS(KerberosProperties testKerberosProperties, FileSystem fileSystem) {
|
TestablePutHDFS(FileSystem fileSystem) {
|
||||||
this.testKerberosProperties = testKerberosProperties;
|
|
||||||
this.fileSystem = fileSystem;
|
this.fileSystem = fileSystem;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
|
|
||||||
return testKerberosProperties;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected FileSystem getFileSystem(Configuration config) {
|
protected FileSystem getFileSystem(Configuration config) {
|
||||||
fileSystem.setConf(config);
|
fileSystem.setConf(config);
|
||||||
|
@@ -16,26 +16,15 @@
  */
 package org.apache.nifi.processors.hadoop;

-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
 import org.apache.nifi.processor.exception.ProcessException;

-import java.io.File;
-
 public class SimpleHadoopProcessor extends AbstractHadoopProcessor {

-    private KerberosProperties testKerberosProperties;
-    private boolean allowExplicitKeytab;
-    private boolean localFileSystemAccessDenied;
+    private final boolean localFileSystemAccessDenied;

-    public SimpleHadoopProcessor(KerberosProperties kerberosProperties) {
-        this(kerberosProperties, true, true);
-    }
-
-    public SimpleHadoopProcessor(KerberosProperties kerberosProperties, boolean allowExplicitKeytab, boolean localFileSystemAccessDenied) {
-        this.testKerberosProperties = kerberosProperties;
-        this.allowExplicitKeytab = allowExplicitKeytab;
+    public SimpleHadoopProcessor(final boolean localFileSystemAccessDenied) {
         this.localFileSystemAccessDenied = localFileSystemAccessDenied;
     }

@@ -43,16 +32,6 @@ public class SimpleHadoopProcessor extends AbstractHadoopProcessor {
     public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
     }

-    @Override
-    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-        return testKerberosProperties;
-    }
-
-    @Override
-    boolean isAllowExplicitKeytab() {
-        return allowExplicitKeytab;
-    }
-
     @Override
     boolean isLocalFileSystemAccessDenied() {
         return localFileSystemAccessDenied;

@@ -25,13 +25,10 @@ import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.flowfile.attributes.StandardFlowFileMediaType;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.util.MockFlowFile;
-import org.apache.nifi.util.NiFiProperties;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
 import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

@@ -45,8 +42,6 @@ import java.util.Map;

 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;

 public class TestCreateHadoopSequenceFile {

@@ -57,22 +52,9 @@ public class TestCreateHadoopSequenceFile {
             new File(testdata, "randombytes-2"), new File(testdata, "randombytes-3")
     };

-    private NiFiProperties mockNiFiProperties;
-    private KerberosProperties kerberosProperties;
-
-    @BeforeAll
-    public static void setUpClass() {
-        System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
-        System.setProperty("org.slf4j.simpleLogger.log.nifi.processors.hadoop", "debug");
-    }
-
     @BeforeEach
     public void setUp() {
-        mockNiFiProperties = mock(NiFiProperties.class);
-        when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
-        kerberosProperties = new KerberosProperties(null);
-
-        CreateHadoopSequenceFile proc = new TestableCreateHadoopSequenceFile(kerberosProperties);
+        CreateHadoopSequenceFile proc = new CreateHadoopSequenceFile();
         controller = TestRunners.newTestRunner(proc);
     }

@@ -108,7 +90,7 @@ public class TestCreateHadoopSequenceFile {
     }

     @Test
-    public void testSequenceFileSaysValueIsBytesWritable() throws UnsupportedEncodingException, IOException {
+    public void testSequenceFileSaysValueIsBytesWritable() throws IOException {
         for (File inFile : inFiles) {
             try (FileInputStream fin = new FileInputStream(inFile)) {
                 controller.enqueue(fin);
@@ -344,19 +326,4 @@ public class TestCreateHadoopSequenceFile {

         assertEquals(DefaultCodec.class.getCanonicalName(), new String(data, codecTypeStartIndex, codecTypeSize, "UTF-8"));
     }
-
-    private static class TestableCreateHadoopSequenceFile extends CreateHadoopSequenceFile {
-
-        private KerberosProperties testKerbersProperties;
-
-        public TestableCreateHadoopSequenceFile(KerberosProperties testKerbersProperties) {
-            this.testKerbersProperties = testKerbersProperties;
-        }
-
-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return testKerbersProperties;
-        }
-    }
-
 }

@@ -20,18 +20,15 @@ import com.google.common.collect.Maps;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.provenance.ProvenanceEventRecord;
 import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.util.MockFlowFile;
-import org.apache.nifi.util.NiFiProperties;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
 import org.ietf.jgss.GSSException;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

-import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.util.List;
@@ -47,13 +44,9 @@ import static org.mockito.Mockito.when;

 public class TestDeleteHDFS {
     private FileSystem mockFileSystem;
-    private KerberosProperties kerberosProperties;

     @BeforeEach
     public void setup() throws Exception {
-        NiFiProperties mockNiFiProperties = mock(NiFiProperties.class);
-        when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
-        kerberosProperties = new KerberosProperties(null);
         mockFileSystem = mock(FileSystem.class);
     }

@@ -63,7 +56,7 @@ public class TestDeleteHDFS {
         Path filePath = new Path("/some/path/to/file.txt");
         when(mockFileSystem.exists(any(Path.class))).thenReturn(true);
         when(mockFileSystem.getUri()).thenReturn(new URI("hdfs://0.example.com:8020"));
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setIncomingConnection(false);
         runner.assertNotValid();
@@ -89,7 +82,7 @@ public class TestDeleteHDFS {
         Path filePath = new Path("/some/path/to/file.txt");
         when(mockFileSystem.exists(any(Path.class))).thenReturn(true);
         when(mockFileSystem.getUri()).thenReturn(new URI("hdfs://0.example.com:8020"));
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setProperty(DeleteHDFS.FILE_OR_DIRECTORY, "${hdfs.file}");
         Map<String, String> attributes = Maps.newHashMap();
@@ -104,7 +97,7 @@ public class TestDeleteHDFS {
     public void testIOException() throws Exception {
         Path filePath = new Path("/some/path/to/file.txt");
         when(mockFileSystem.exists(any(Path.class))).thenThrow(new IOException());
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setProperty(DeleteHDFS.FILE_OR_DIRECTORY, "${hdfs.file}");
         Map<String, String> attributes = Maps.newHashMap();
@@ -118,7 +111,7 @@ public class TestDeleteHDFS {
     public void testGSSException() throws Exception {
         Path filePath = new Path("/some/path/to/file.txt");
         when(mockFileSystem.exists(any(Path.class))).thenThrow(new IOException(new GSSException(13)));
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setProperty(DeleteHDFS.FILE_OR_DIRECTORY, "${hdfs.file}");
         Map<String, String> attributes = Maps.newHashMap();
@@ -135,7 +128,7 @@ public class TestDeleteHDFS {
         Path filePath = new Path("/some/path/to/file.txt");
         when(mockFileSystem.exists(any(Path.class))).thenReturn(true);
         when(mockFileSystem.delete(any(Path.class), any(Boolean.class))).thenThrow(new IOException("Permissions Error"));
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setProperty(DeleteHDFS.FILE_OR_DIRECTORY, "${hdfs.file}");
         Map<String, String> attributes = Maps.newHashMap();
@@ -152,7 +145,7 @@ public class TestDeleteHDFS {
     @Test
     public void testNoFlowFilesWithIncomingConnection() {
         Path filePath = new Path("${hdfs.file}");
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setProperty(DeleteHDFS.FILE_OR_DIRECTORY, filePath.toString());
         runner.setIncomingConnection(true);
@@ -166,7 +159,7 @@ public class TestDeleteHDFS {
     public void testUnsuccessfulDelete() throws Exception {
         Path filePath = new Path("/some/path/to/file.txt");
         when(mockFileSystem.exists(any(Path.class))).thenReturn(false);
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setIncomingConnection(false);
         runner.assertNotValid();
@@ -190,7 +183,7 @@ public class TestDeleteHDFS {
         when(mockFileSystem.exists(any(Path.class))).thenReturn(true);
         when(mockFileSystem.globStatus(any(Path.class))).thenReturn(fileStatuses);
         when(mockFileSystem.getUri()).thenReturn(new URI("hdfs://0.example.com:8020"));
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setIncomingConnection(false);
         runner.assertNotValid();
@@ -214,7 +207,7 @@ public class TestDeleteHDFS {
         when(mockFileSystem.exists(any(Path.class))).thenReturn(true);
         when(mockFileSystem.globStatus(any(Path.class))).thenReturn(fileStatuses);
         when(mockFileSystem.getUri()).thenReturn(new URI("hdfs://0.example.com:8020"));
-        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(kerberosProperties, mockFileSystem);
+        DeleteHDFS deleteHDFS = new TestableDeleteHDFS(mockFileSystem);
         TestRunner runner = TestRunners.newTestRunner(deleteHDFS);
         runner.setIncomingConnection(true);
         Map<String, String> attributes = Maps.newHashMap();
@@ -227,19 +220,12 @@ public class TestDeleteHDFS {
     }

     private static class TestableDeleteHDFS extends DeleteHDFS {
-        private KerberosProperties testKerberosProperties;
-        private FileSystem mockFileSystem;
+        private final FileSystem mockFileSystem;

-        public TestableDeleteHDFS(KerberosProperties kerberosProperties, FileSystem mockFileSystem) {
-            this.testKerberosProperties = kerberosProperties;
+        public TestableDeleteHDFS(FileSystem mockFileSystem) {
             this.mockFileSystem = mockFileSystem;
         }

-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return testKerberosProperties;
-        }
-
         @Override
         protected FileSystem getFileSystem() {
             return mockFileSystem;

@@ -18,12 +18,10 @@ package org.apache.nifi.processors.hadoop;

 import org.apache.hadoop.fs.FileSystem;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processors.hadoop.util.MockFileSystem;
 import org.apache.nifi.provenance.ProvenanceEventRecord;
 import org.apache.nifi.provenance.ProvenanceEventType;
 import org.apache.nifi.util.MockFlowFile;
-import org.apache.nifi.util.NiFiProperties;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
 import org.junit.jupiter.api.BeforeEach;
@@ -40,21 +38,14 @@ import static org.apache.nifi.processors.hadoop.AbstractHadoopProcessor.HADOOP_F
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;

 public class TestFetchHDFS {

     private TestRunner runner;
-    private KerberosProperties kerberosProperties;

     @BeforeEach
     public void setup() {
-        NiFiProperties mockNiFiProperties = mock(NiFiProperties.class);
-        when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
-        kerberosProperties = new KerberosProperties(null);
-
-        TestableFetchHDFS proc = new TestableFetchHDFS(kerberosProperties);
+        FetchHDFS proc = new TestableFetchHDFS();
         runner = TestRunners.newTestRunner(proc);
     }

@@ -143,7 +134,7 @@ public class TestFetchHDFS {

     @Test
     public void testAutomaticDecompression() throws IOException {
-        FetchHDFS proc = new TestableFetchHDFS(kerberosProperties);
+        FetchHDFS proc = new TestableFetchHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(FetchHDFS.FILENAME, "src/test/resources/testdata/randombytes-1.gz");
         runner.setProperty(FetchHDFS.COMPRESSION_CODEC, "AUTOMATIC");
@@ -161,7 +152,7 @@ public class TestFetchHDFS {

     @Test
     public void testInferCompressionCodecDisabled() throws IOException {
-        FetchHDFS proc = new TestableFetchHDFS(kerberosProperties);
+        FetchHDFS proc = new TestableFetchHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(FetchHDFS.FILENAME, "src/test/resources/testdata/randombytes-1.gz");
         runner.setProperty(FetchHDFS.COMPRESSION_CODEC, "NONE");
@@ -179,7 +170,7 @@ public class TestFetchHDFS {

     @Test
     public void testFileExtensionNotACompressionCodec() throws IOException {
-        FetchHDFS proc = new TestableFetchHDFS(kerberosProperties);
+        FetchHDFS proc = new TestableFetchHDFS();
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(FetchHDFS.FILENAME, "src/test/resources/testdata/13545423550275052.zip");
         runner.setProperty(FetchHDFS.COMPRESSION_CODEC, "AUTOMATIC");
@@ -196,10 +187,10 @@ public class TestFetchHDFS {
     }

     @Test
-    public void testGSSException() throws IOException {
+    public void testGSSException() {
         MockFileSystem fileSystem = new MockFileSystem();
         fileSystem.setFailOnOpen(true);
-        FetchHDFS proc = new TestableFetchHDFS(kerberosProperties, fileSystem);
+        FetchHDFS proc = new TestableFetchHDFS(fileSystem);
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(FetchHDFS.FILENAME, "src/test/resources/testdata/randombytes-1.gz");
         runner.setProperty(FetchHDFS.COMPRESSION_CODEC, "NONE");
@@ -218,7 +209,7 @@ public class TestFetchHDFS {
     public void testRuntimeException() {
         MockFileSystem fileSystem = new MockFileSystem();
         fileSystem.setRuntimeFailOnOpen(true);
-        FetchHDFS proc = new TestableFetchHDFS(kerberosProperties, fileSystem);
+        FetchHDFS proc = new TestableFetchHDFS(fileSystem);
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(FetchHDFS.FILENAME, "src/test/resources/testdata/randombytes-1.gz");
         runner.setProperty(FetchHDFS.COMPRESSION_CODEC, "NONE");
@@ -234,23 +225,15 @@ public class TestFetchHDFS {
     }

     private static class TestableFetchHDFS extends FetchHDFS {
-        private final KerberosProperties testKerberosProps;
         private final FileSystem fileSystem;

-        public TestableFetchHDFS(KerberosProperties testKerberosProps) {
-            this.testKerberosProps = testKerberosProps;
+        public TestableFetchHDFS() {
             this.fileSystem = null;
         }
-        public TestableFetchHDFS(KerberosProperties testKerberosProps, final FileSystem fileSystem) {
-            this.testKerberosProps = testKerberosProps;
+        public TestableFetchHDFS(final FileSystem fileSystem) {
             this.fileSystem = fileSystem;
         }

-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return testKerberosProps;
-        }
-
         @Override
         protected FileSystem getFileSystem() {
             return fileSystem == null ? super.getFileSystem() : fileSystem;

@@ -17,7 +17,6 @@
 package org.apache.nifi.processors.hadoop;

 import com.google.common.collect.Maps;
-import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
@@ -31,43 +30,33 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.nifi.components.AllowableValue;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processors.hadoop.util.MockFileSystem;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.MockFlowFile;
-import org.apache.nifi.util.NiFiProperties;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

 import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;

 public class TestGetHDFSFileInfo {
     private static final Pattern SINGLE_JSON_PATTERN = Pattern.compile("^\\{[^\\}]*\\}$");

     private TestRunner runner;
     private GetHDFSFileInfoWithMockedFileSystem proc;
-    private NiFiProperties mockNiFiProperties;
-    private KerberosProperties kerberosProperties;

     @BeforeEach
     public void setup() throws InitializationException {
-        mockNiFiProperties = mock(NiFiProperties.class);
-        when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
-        kerberosProperties = new KerberosProperties(null);
-
-        proc = new GetHDFSFileInfoWithMockedFileSystem(kerberosProperties);
+        proc = new GetHDFSFileInfoWithMockedFileSystem();
         runner = TestRunners.newTestRunner(proc);

         runner.setProperty(GetHDFSFileInfo.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site.xml");
     }

     @Test
-    public void testInvalidBatchSizeWhenDestinationAndGroupingDoesntAllowBatchSize() throws Exception {
+    public void testInvalidBatchSizeWhenDestinationAndGroupingDoesntAllowBatchSize() {
         Arrays.asList("1", "2", "100").forEach(
                 validBatchSize -> {
                     testValidateBatchSize(GetHDFSFileInfo.DESTINATION_ATTRIBUTES, GetHDFSFileInfo.GROUP_ALL, validBatchSize, false);
@@ -80,7 +69,7 @@ public class TestGetHDFSFileInfo {
     }

     @Test
-    public void testInvalidBatchSizeWhenValueIsInvalid() throws Exception {
+    public void testInvalidBatchSizeWhenValueIsInvalid() {
         Arrays.asList("-1", "0", "someString").forEach(
                 inValidBatchSize -> {
                     testValidateBatchSize(GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, inValidBatchSize, false);
@@ -89,7 +78,7 @@ public class TestGetHDFSFileInfo {
     }

     @Test
-    public void testValidBatchSize() throws Exception {
+    public void testValidBatchSize() {
         Arrays.asList("1", "2", "100").forEach(
                 validBatchSize -> {
                     testValidateBatchSize(GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, validBatchSize, true);
@@ -646,63 +635,63 @@ public class TestGetHDFSFileInfo {
     }

     @Test
-    public void testBatchSizeWithDestAttributesGroupAllBatchSizeNull() throws Exception {
+    public void testBatchSizeWithDestAttributesGroupAllBatchSizeNull() {
         testBatchSize(null, GetHDFSFileInfo.DESTINATION_ATTRIBUTES, GetHDFSFileInfo.GROUP_ALL, 1);
     }

     @Test
-    public void testBatchSizeWithDestAttributesGroupDirBatchSizeNull() throws Exception {
+    public void testBatchSizeWithDestAttributesGroupDirBatchSizeNull() {
         testBatchSize(null, GetHDFSFileInfo.DESTINATION_ATTRIBUTES, GetHDFSFileInfo.GROUP_PARENT_DIR, 5);
     }

     @Test
-    public void testBatchSizeWithDestAttributesGroupNoneBatchSizeNull() throws Exception {
+    public void testBatchSizeWithDestAttributesGroupNoneBatchSizeNull() {
         testBatchSize(null, GetHDFSFileInfo.DESTINATION_ATTRIBUTES, GetHDFSFileInfo.GROUP_NONE, 9);
     }

     @Test
-    public void testBatchSizeWithDestContentGroupAllBatchSizeNull() throws Exception {
+    public void testBatchSizeWithDestContentGroupAllBatchSizeNull() {
         testBatchSize(null, GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_ALL, 1);
     }

     @Test
-    public void testBatchSizeWithDestContentGroupDirBatchSizeNull() throws Exception {
+    public void testBatchSizeWithDestContentGroupDirBatchSizeNull() {
         testBatchSize(null, GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_PARENT_DIR, 5);
     }

     @Test
-    public void testBatchSizeWithDestContentGroupNoneBatchSizeNull() throws Exception {
+    public void testBatchSizeWithDestContentGroupNoneBatchSizeNull() {
         testBatchSize(null, GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, 9);

         checkContentSizes(Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1, 1));
     }
     @Test
-    public void testBatchSizeWithDestContentGroupNoneBatchSize1() throws Exception {
+    public void testBatchSizeWithDestContentGroupNoneBatchSize1() {
         testBatchSize("1", GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, 9);
         checkContentSizes(Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1, 1));
     }
     @Test
-    public void testBatchSizeWithDestContentGroupNoneBatchSize3() throws Exception {
+    public void testBatchSizeWithDestContentGroupNoneBatchSize3() {
         testBatchSize("3", GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, 3);
         checkContentSizes(Arrays.asList(3, 3, 3));
     }
     @Test
-    public void testBatchSizeWithDestContentGroupNoneBatchSize4() throws Exception {
+    public void testBatchSizeWithDestContentGroupNoneBatchSize4() {
         testBatchSize("4", GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, 3);
         checkContentSizes(Arrays.asList(4, 4, 1));
     }
     @Test
-    public void testBatchSizeWithDestContentGroupNoneBatchSize5() throws Exception {
+    public void testBatchSizeWithDestContentGroupNoneBatchSize5() {
         testBatchSize("5", GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, 2);
         checkContentSizes(Arrays.asList(5, 4));
     }
     @Test
-    public void testBatchSizeWithDestContentGroupNoneBatchSize9() throws Exception {
+    public void testBatchSizeWithDestContentGroupNoneBatchSize9() {
         testBatchSize("9", GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, 1);
         checkContentSizes(Arrays.asList(9));
     }
     @Test
-    public void testBatchSizeWithDestContentGroupNoneBatchSize100() throws Exception {
+    public void testBatchSizeWithDestContentGroupNoneBatchSize100() {
         testBatchSize("100", GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, 1);
|
testBatchSize("100", GetHDFSFileInfo.DESTINATION_CONTENT, GetHDFSFileInfo.GROUP_NONE, 1);
|
||||||
checkContentSizes(Arrays.asList(9));
|
checkContentSizes(Arrays.asList(9));
|
||||||
}
|
}
|
||||||
@ -781,15 +770,8 @@ public class TestGetHDFSFileInfo {
|
|||||||
|
|
||||||
private class GetHDFSFileInfoWithMockedFileSystem extends GetHDFSFileInfo {
|
private class GetHDFSFileInfoWithMockedFileSystem extends GetHDFSFileInfo {
|
||||||
private final MockFileSystem fileSystem = new MockFileSystem();
|
private final MockFileSystem fileSystem = new MockFileSystem();
|
||||||
private final KerberosProperties testKerberosProps;
|
|
||||||
|
|
||||||
public GetHDFSFileInfoWithMockedFileSystem(KerberosProperties kerberosProperties) {
|
public GetHDFSFileInfoWithMockedFileSystem() {
|
||||||
this.testKerberosProps = kerberosProperties;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
|
|
||||||
return testKerberosProps;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
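Note on the checkContentSizes(...) expectations above: the per-FlowFile counts follow from plain ceiling division of the 9 listed entries by the configured batch size, and the validation tests indicate that only the DESTINATION_CONTENT / GROUP_NONE combination honors a batch size at all. A minimal sketch of that partitioning arithmetic — illustrative only, not the processor's actual code:

    import java.util.ArrayList;
    import java.util.List;

    // batchSizes(9, 4) -> [4, 4, 1], matching checkContentSizes(Arrays.asList(4, 4, 1))
    static List<Integer> batchSizes(final int total, final int batch) {
        final List<Integer> sizes = new ArrayList<>();
        for (int remaining = total; remaining > 0; remaining -= batch) {
            sizes.add(Math.min(batch, remaining));
        }
        return sizes;
    }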
@@ -25,7 +25,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
 import org.apache.nifi.components.state.Scope;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.serialization.record.MockRecordWriter;
@@ -74,9 +73,7 @@ class TestListHDFS {
 
     @BeforeEach
     public void setup() throws InitializationException {
-        final KerberosProperties kerberosProperties = new KerberosProperties(null);
-
-        proc = new ListHDFSWithMockedFileSystem(kerberosProperties);
+        proc = new ListHDFSWithMockedFileSystem();
         mockLogger = spy(new MockComponentLog(UUID.randomUUID().toString(), proc));
         runner = TestRunners.newTestRunner(proc, mockLogger);
 
@@ -560,16 +557,6 @@ class TestListHDFS {
 
     private static class ListHDFSWithMockedFileSystem extends ListHDFS {
         private final MockFileSystem fileSystem = new MockFileSystem();
-        private final KerberosProperties testKerberosProps;
-
-        public ListHDFSWithMockedFileSystem(KerberosProperties kerberosProperties) {
-            this.testKerberosProps = kerberosProperties;
-        }
-
-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return testKerberosProps;
-        }
 
         @Override
         protected FileSystem getFileSystem() {
@@ -1,155 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.processors.hadoop;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.processors.hadoop.util.FilterMode;
-import org.apache.nifi.reporting.InitializationException;
-import org.apache.nifi.util.MockComponentLog;
-import org.apache.nifi.util.TestRunner;
-import org.apache.nifi.util.TestRunners;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.time.Duration;
-import java.time.Instant;
-import java.util.UUID;
-
-import static org.mockito.Mockito.spy;
-
-/**
- * In order to test different ListHDFS implementations change the ListHDFSWithMockedFileSystem ancestor class to the one in question.
- * Provide the HADOOP_RESOURCE_CONFIG, the ROOT_DIR and set the depth of the HDFS tree structure (k-ary complete tree) with the number of files.
- * First create the structure by running createHdfsNaryCompleteTree() test case. Then run the testListHdfsTimeElapsed() test case with
- * the implementation to test.
- */
-@Disabled("This is a performance test and should be run manually")
-class TestListHDFSPerformanceIT {
-
-    private static final long BYTE_TO_MB = 1024 * 1024;
-    private static final String HADOOP_RESOURCE_CONFIG = "???";
-    private static final FileSystem FILE_SYSTEM = getFileSystem();
-    private static final String ROOT_DIR = "???";
-    private static final int NUM_CHILDREN = 3;
-    private static final int NUM_OF_FILES = 1000;
-
-
-    private TestRunner runner;
-    private MockComponentLog mockLogger;
-    private ListHDFSWithMockedFileSystem proc;
-
-
-    @BeforeEach
-    public void setup() throws InitializationException {
-        final KerberosProperties kerberosProperties = new KerberosProperties(null);
-
-        proc = new ListHDFSWithMockedFileSystem(kerberosProperties);
-        mockLogger = spy(new MockComponentLog(UUID.randomUUID().toString(), proc));
-        runner = TestRunners.newTestRunner(proc, mockLogger);
-
-        runner.setProperty(ListHDFS.HADOOP_CONFIGURATION_RESOURCES, HADOOP_RESOURCE_CONFIG);
-        runner.setProperty(ListHDFS.DIRECTORY, ROOT_DIR);
-        runner.setProperty(ListHDFS.FILE_FILTER_MODE, FilterMode.FILTER_DIRECTORIES_AND_FILES.getValue());
-        runner.setProperty(ListHDFS.FILE_FILTER, "[^\\.].*\\.txt");
-    }
-
-    @Test
-    @Disabled("Enable this test to create an HDFS file tree")
-    void createHdfsNaryCompleteTree() throws IOException {
-        createTree(FILE_SYSTEM, new Path(ROOT_DIR), 0);
-    }
-
-    /**
-     * This only measures an estimate memory usage.
-     */
-    @Test
-    void testListHdfsTimeElapsed() {
-        final Runtime runtime = Runtime.getRuntime();
-        long usedMemoryBefore = getCurrentlyUsedMemory(runtime);
-        Instant start = Instant.now();
-
-        runner.run();
-
-        Instant finish = Instant.now();
-        long timeElapsed = Duration.between(start, finish).toMillis();
-        System.out.println("TIME ELAPSED: " + timeElapsed);
-
-        long usedMemoryAfter = getCurrentlyUsedMemory(runtime);
-        System.out.println("Memory increased (MB):" + (usedMemoryAfter - usedMemoryBefore));
-    }
-
-
-    private long getCurrentlyUsedMemory(final Runtime runtime) {
-        return (runtime.totalMemory() - runtime.freeMemory()) / BYTE_TO_MB;
-    }
-
-    private void createTree(FileSystem fileSystem, Path currentPath, int depth) throws IOException {
-        if (depth >= NUM_CHILDREN) {
-            for (int j = 0; j < NUM_OF_FILES; j++) {
-                fileSystem.createNewFile(new Path(currentPath + "/file_" + j));
-            }
-            return;
-        }
-
-        for (int i = 0; i < NUM_CHILDREN; i++) {
-            String childPath = currentPath.toString() + "/dir_" + i;
-            Path newPath = new Path(childPath);
-            fileSystem.mkdirs(newPath);
-            for (int j = 0; j < NUM_OF_FILES; j++) {
-                fileSystem.createNewFile(new Path(currentPath + "/file_" + j));
-                System.out.println(i + " | " + j + " | File: " + newPath);
-            }
-            System.out.println(i + " | Directory: " + newPath);
-            createTree(fileSystem, newPath, depth + 1);
-        }
-    }
-
-
-    private static FileSystem getFileSystem() {
-        String[] locations = HADOOP_RESOURCE_CONFIG.split(",");
-        Configuration config = new Configuration();
-        for (String resource : locations) {
-            config.addResource(new Path(resource.trim()));
-        }
-        try {
-            return FileSystem.get(config);
-        } catch (IOException e) {
-            throw new UncheckedIOException("Failed to get FileSystem", e);
-        }
-    }
-
-    private static class ListHDFSWithMockedFileSystem extends ListHDFS {
-        private final KerberosProperties testKerberosProps;
-
-        public ListHDFSWithMockedFileSystem(KerberosProperties kerberosProperties) {
-            this.testKerberosProps = kerberosProperties;
-        }
-
-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return testKerberosProps;
-        }
-    }
-
-}
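The deleted performance test estimated heap use from Runtime counters sampled before and after runner.run(), which is noisy because it counts garbage that has not yet been collected. A common refinement — an assumption here, not something the removed test did — is to hint a collection before sampling:

    // Sketch: sample used heap after suggesting a GC, to reduce measurement noise.
    static long usedHeapBytes() {
        final Runtime runtime = Runtime.getRuntime();
        runtime.gc(); // only a hint to the JVM, but usually adequate for rough comparisons
        return runtime.totalMemory() - runtime.freeMemory();
    }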
@@ -23,7 +23,6 @@ import org.apache.hadoop.hdfs.client.HdfsAdmin;
 import org.apache.hadoop.hdfs.inotify.Event;
 import org.apache.hadoop.hdfs.inotify.EventBatch;
 import org.apache.nifi.components.state.Scope;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processors.hadoop.inotify.util.EventTestUtils;
 import org.apache.nifi.util.MockFlowFile;
 import org.apache.nifi.util.NiFiProperties;
@@ -32,7 +31,6 @@ import org.apache.nifi.util.TestRunners;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import java.io.File;
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
@@ -46,15 +44,12 @@ import static org.mockito.Mockito.when;
 
 public class TestGetHDFSEvents {
     NiFiProperties mockNiFiProperties;
-    KerberosProperties kerberosProperties;
     DFSInotifyEventInputStream inotifyEventInputStream;
     HdfsAdmin hdfsAdmin;
 
     @BeforeEach
     public void setup() {
         mockNiFiProperties = mock(NiFiProperties.class);
-        when(mockNiFiProperties.getKerberosConfigurationFile()).thenReturn(null);
-        kerberosProperties = new KerberosProperties(null);
         inotifyEventInputStream = mock(DFSInotifyEventInputStream.class);
         hdfsAdmin = mock(HdfsAdmin.class);
     }
@@ -62,7 +57,7 @@ public class TestGetHDFSEvents {
     @Test
     public void notSettingHdfsPathToWatchShouldThrowError() {
         AssertionError error = assertThrows(AssertionError.class, () -> {
-            GetHDFSEvents processor = new TestableGetHDFSEvents(kerberosProperties, hdfsAdmin);
+            GetHDFSEvents processor = new TestableGetHDFSEvents(hdfsAdmin);
             TestRunner runner = TestRunners.newTestRunner(processor);
 
             runner.setProperty(GetHDFSEvents.POLL_DURATION, "1 second");
@@ -81,7 +76,7 @@ public class TestGetHDFSEvents {
         when(hdfsAdmin.getInotifyEventStream()).thenReturn(inotifyEventInputStream);
         when(eventBatch.getTxid()).thenReturn(100L);
 
-        GetHDFSEvents processor = new TestableGetHDFSEvents(kerberosProperties, hdfsAdmin);
+        GetHDFSEvents processor = new TestableGetHDFSEvents(hdfsAdmin);
         TestRunner runner = TestRunners.newTestRunner(processor);
 
         runner.setProperty(GetHDFSEvents.POLL_DURATION, "1 second");
@@ -100,7 +95,7 @@ public class TestGetHDFSEvents {
         when(inotifyEventInputStream.poll(1000000L, TimeUnit.MICROSECONDS)).thenReturn(null);
         when(hdfsAdmin.getInotifyEventStream()).thenReturn(inotifyEventInputStream);
 
-        GetHDFSEvents processor = new TestableGetHDFSEvents(kerberosProperties, hdfsAdmin);
+        GetHDFSEvents processor = new TestableGetHDFSEvents(hdfsAdmin);
         TestRunner runner = TestRunners.newTestRunner(processor);
 
         runner.setProperty(GetHDFSEvents.POLL_DURATION, "1 second");
@@ -123,7 +118,7 @@ public class TestGetHDFSEvents {
         when(hdfsAdmin.getInotifyEventStream()).thenReturn(inotifyEventInputStream);
         when(eventBatch.getTxid()).thenReturn(100L);
 
-        GetHDFSEvents processor = new TestableGetHDFSEvents(kerberosProperties, hdfsAdmin);
+        GetHDFSEvents processor = new TestableGetHDFSEvents(hdfsAdmin);
         TestRunner runner = TestRunners.newTestRunner(processor);
 
         runner.setProperty(GetHDFSEvents.POLL_DURATION, "1 second");
@@ -147,7 +142,7 @@ public class TestGetHDFSEvents {
         when(hdfsAdmin.getInotifyEventStream()).thenReturn(inotifyEventInputStream);
         when(eventBatch.getTxid()).thenReturn(100L);
 
-        GetHDFSEvents processor = new TestableGetHDFSEvents(kerberosProperties, hdfsAdmin);
+        GetHDFSEvents processor = new TestableGetHDFSEvents(hdfsAdmin);
         TestRunner runner = TestRunners.newTestRunner(processor);
 
         runner.setProperty(GetHDFSEvents.HDFS_PATH_TO_WATCH, "/some/path/create(/)?");
@@ -170,7 +165,7 @@ public class TestGetHDFSEvents {
         when(hdfsAdmin.getInotifyEventStream()).thenReturn(inotifyEventInputStream);
         when(eventBatch.getTxid()).thenReturn(100L);
 
-        GetHDFSEvents processor = new TestableGetHDFSEvents(kerberosProperties, hdfsAdmin);
+        GetHDFSEvents processor = new TestableGetHDFSEvents(hdfsAdmin);
         TestRunner runner = TestRunners.newTestRunner(processor);
 
         runner.setProperty(GetHDFSEvents.HDFS_PATH_TO_WATCH, "/some/path(/.*)?");
@@ -205,7 +200,7 @@ public class TestGetHDFSEvents {
         when(hdfsAdmin.getInotifyEventStream()).thenReturn(inotifyEventInputStream);
         when(eventBatch.getTxid()).thenReturn(100L);
 
-        GetHDFSEvents processor = new TestableGetHDFSEvents(kerberosProperties, hdfsAdmin);
+        GetHDFSEvents processor = new TestableGetHDFSEvents(hdfsAdmin);
         TestRunner runner = TestRunners.newTestRunner(processor);
 
         runner.setProperty(GetHDFSEvents.HDFS_PATH_TO_WATCH, "/some/path/${literal(1)}/${literal(2)}/${literal(3)}/.*.txt");
@@ -218,7 +213,7 @@ public class TestGetHDFSEvents {
 
         for (MockFlowFile f : successfulFlowFiles) {
             String eventType = f.getAttribute(EventAttributes.EVENT_TYPE);
-            assertTrue(eventType.equals("CREATE"));
+            assertEquals("CREATE", eventType);
         }
 
         verify(eventBatch).getTxid();
@@ -233,14 +228,12 @@ public class TestGetHDFSEvents {
         };
     }
 
-    private class TestableGetHDFSEvents extends GetHDFSEvents {
+    private static class TestableGetHDFSEvents extends GetHDFSEvents {
 
-        private final KerberosProperties testKerberosProperties;
         private final FileSystem fileSystem = new DistributedFileSystem();
         private final HdfsAdmin hdfsAdmin;
 
-        TestableGetHDFSEvents(KerberosProperties testKerberosProperties, HdfsAdmin hdfsAdmin) {
-            this.testKerberosProperties = testKerberosProperties;
+        TestableGetHDFSEvents(HdfsAdmin hdfsAdmin) {
             this.hdfsAdmin = hdfsAdmin;
         }
 
@@ -249,11 +242,6 @@ public class TestGetHDFSEvents {
             return fileSystem;
         }
 
-        @Override
-        protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-            return testKerberosProperties;
-        }
-
         @Override
         protected HdfsAdmin getHdfsAdmin() {
             return hdfsAdmin;
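The assertTrue-to-assertEquals change in TestGetHDFSEvents is behavior-neutral but improves diagnostics: assertEquals reports the expected and actual values on failure, and putting the "CREATE" literal first also avoids a NullPointerException when the attribute is absent:

    assertTrue(eventType.equals("CREATE")); // failure only says: expected <true> but was <false>; throws NPE if eventType is null
    assertEquals("CREATE", eventType);      // failure shows expected <CREATE> vs. the actual value, and is null-safe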
@@ -117,10 +117,6 @@
         <groupId>org.apache.nifi</groupId>
         <artifactId>nifi-schema-registry-service-api</artifactId>
     </dependency>
-    <dependency>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-    </dependency>
     <dependency>
         <groupId>org.apache.nifi</groupId>
         <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -59,11 +59,6 @@
         <artifactId>mockwebserver</artifactId>
         <scope>test</scope>
     </dependency>
-    <dependency>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-        <scope>test</scope>
-    </dependency>
     <dependency>
         <groupId>org.apache.nifi</groupId>
         <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -570,12 +570,6 @@
         <version>2.0.0-SNAPSHOT</version>
         <scope>test</scope>
     </dependency>
-    <dependency>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-        <scope>test</scope>
-    </dependency>
     <dependency>
         <groupId>org.postgresql</groupId>
         <artifactId>postgresql</artifactId>
@@ -114,12 +114,6 @@
         <version>2.0.0-SNAPSHOT</version>
         <scope>provided</scope>
     </dependency>
-    <dependency>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-        <scope>provided</scope>
-    </dependency>
     <dependency>
         <groupId>org.apache.nifi</groupId>
         <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -41,10 +41,6 @@
         <artifactId>nifi-service-utils</artifactId>
         <version>2.0.0-SNAPSHOT</version>
     </dependency>
-    <dependency>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-    </dependency>
     <dependency>
         <groupId>org.apache.nifi</groupId>
         <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -124,12 +120,6 @@
         <version>${h2.version}</version>
         <scope>test</scope>
     </dependency>
-    <dependency>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-test-utils</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-        <scope>test</scope>
-    </dependency>
 </dependencies>
 <build>
     <plugins>
@@ -21,7 +21,6 @@ import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.util.AbstractMap;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -39,20 +38,14 @@ import org.apache.nifi.annotation.documentation.Tags;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;
 import org.apache.nifi.components.RequiredPermission;
-import org.apache.nifi.components.ValidationContext;
-import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.controller.VerifiableControllerService;
 import org.apache.nifi.dbcp.utils.DataSourceConfiguration;
 import org.apache.nifi.expression.AttributeExpression;
 import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
-import org.apache.nifi.kerberos.KerberosUserService;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.security.krb.KerberosKeytabUser;
-import org.apache.nifi.security.krb.KerberosPasswordUser;
-import org.apache.nifi.security.krb.KerberosUser;
 
 import static org.apache.nifi.dbcp.utils.DBCPProperties.DATABASE_URL;
 import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_DRIVERNAME;
@@ -104,42 +97,12 @@ public class DBCPConnectionPool extends AbstractDBCPConnectionPool implements DB
 
     private static final List<PropertyDescriptor> PROPERTIES;
 
-    public static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
-        .name("kerberos-credentials-service")
-        .displayName("Kerberos Credentials Service")
-        .description("Specifies the Kerberos Credentials Controller Service that should be used for authenticating with Kerberos")
-        .identifiesControllerService(KerberosCredentialsService.class)
-        .required(false)
-        .build();
-
-    public static final PropertyDescriptor KERBEROS_PRINCIPAL = new PropertyDescriptor.Builder()
-        .name("kerberos-principal")
-        .displayName("Kerberos Principal")
-        .description("The principal to use when specifying the principal and password directly in the processor for authenticating via Kerberos.")
-        .required(false)
-        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-        .addValidator(StandardValidators.createAttributeExpressionLanguageValidator(AttributeExpression.ResultType.STRING))
-        .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
-        .build();
-
-    public static final PropertyDescriptor KERBEROS_PASSWORD = new PropertyDescriptor.Builder()
-        .name("kerberos-password")
-        .displayName("Kerberos Password")
-        .description("The password to use when specifying the principal and password directly in the processor for authenticating via Kerberos.")
-        .required(false)
-        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-        .sensitive(true)
-        .build();
-
     static {
         final List<PropertyDescriptor> props = new ArrayList<>();
         props.add(DATABASE_URL);
         props.add(DB_DRIVERNAME);
         props.add(DB_DRIVER_LOCATION);
         props.add(KERBEROS_USER_SERVICE);
-        props.add(KERBEROS_CREDENTIALS_SERVICE);
-        props.add(KERBEROS_PRINCIPAL);
-        props.add(KERBEROS_PASSWORD);
         props.add(DB_USER);
         props.add(DB_PASSWORD);
         props.add(MAX_WAIT_TIME);
@@ -161,56 +124,10 @@ public class DBCPConnectionPool extends AbstractDBCPConnectionPool implements DB
     }
 
     @Override
-    protected Collection<ValidationResult> customValidate(ValidationContext context) {
-        final List<ValidationResult> results = new ArrayList<>();
-
-        final boolean kerberosPrincipalProvided = !StringUtils.isBlank(context.getProperty(KERBEROS_PRINCIPAL).evaluateAttributeExpressions().getValue());
-        final boolean kerberosPasswordProvided = !StringUtils.isBlank(context.getProperty(KERBEROS_PASSWORD).getValue());
-
-        if (kerberosPrincipalProvided && !kerberosPasswordProvided) {
-            results.add(new ValidationResult.Builder()
-                .subject(KERBEROS_PASSWORD.getDisplayName())
-                .valid(false)
-                .explanation("a password must be provided for the given principal")
-                .build());
-        }
-
-        if (kerberosPasswordProvided && !kerberosPrincipalProvided) {
-            results.add(new ValidationResult.Builder()
-                .subject(KERBEROS_PRINCIPAL.getDisplayName())
-                .valid(false)
-                .explanation("a principal must be provided for the given password")
-                .build());
-        }
-
-        final KerberosCredentialsService kerberosCredentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-        final KerberosUserService kerberosUserService = context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
-
-        if (kerberosCredentialsService != null && (kerberosPrincipalProvided || kerberosPasswordProvided)) {
-            results.add(new ValidationResult.Builder()
-                .subject(KERBEROS_CREDENTIALS_SERVICE.getDisplayName())
-                .valid(false)
-                .explanation("kerberos principal/password and kerberos credential service cannot be configured at the same time")
-                .build());
-        }
-
-        if (kerberosUserService != null && (kerberosPrincipalProvided || kerberosPasswordProvided)) {
-            results.add(new ValidationResult.Builder()
-                .subject(KERBEROS_USER_SERVICE.getDisplayName())
-                .valid(false)
-                .explanation("kerberos principal/password and kerberos user service cannot be configured at the same time")
-                .build());
-        }
-
-        if (kerberosUserService != null && kerberosCredentialsService != null) {
-            results.add(new ValidationResult.Builder()
-                .subject(KERBEROS_USER_SERVICE.getDisplayName())
-                .valid(false)
-                .explanation("kerberos user service and kerberos credential service cannot be configured at the same time")
-                .build());
-        }
-
-        return results;
+    public void migrateProperties(final PropertyConfiguration config) {
+        config.removeProperty("kerberos-principal");
+        config.removeProperty("kerberos-password");
+        config.removeProperty("kerberos-credentials-service");
     }
 
     BasicDataSource getDataSource() {
@@ -310,20 +227,4 @@ public class DBCPConnectionPool extends AbstractDBCPConnectionPool implements DB
             }
         }
     }
-
-    @Override
-    protected KerberosUser getKerberosUserByCredentials(ConfigurationContext context) {
-        KerberosUser kerberosUser = super.getKerberosUserByCredentials(context);
-        if (kerberosUser == null) {
-            final KerberosCredentialsService kerberosCredentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-            final String kerberosPrincipal = context.getProperty(KERBEROS_PRINCIPAL).evaluateAttributeExpressions().getValue();
-            final String kerberosPassword = context.getProperty(KERBEROS_PASSWORD).getValue();
-            if (kerberosCredentialsService != null) {
-                kerberosUser = new KerberosKeytabUser(kerberosCredentialsService.getPrincipal(), kerberosCredentialsService.getKeytab());
-            } else if (!StringUtils.isBlank(kerberosPrincipal) && !StringUtils.isBlank(kerberosPassword)) {
-                kerberosUser = new KerberosPasswordUser(kerberosPrincipal, kerberosPassword);
-            }
-        }
-        return kerberosUser;
-    }
 }
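The customValidate-to-migrateProperties swap above is the recurring pattern in this commit: instead of cross-validating the legacy Kerberos properties, the component drops them when an old flow is loaded, so flows written against earlier releases keep working. A minimal sketch of the hook on a hypothetical service (the property names are the ones removed above):

    import org.apache.nifi.controller.AbstractControllerService;
    import org.apache.nifi.migration.PropertyConfiguration;

    public class ExampleService extends AbstractControllerService {
        @Override
        public void migrateProperties(final PropertyConfiguration config) {
            // Discard properties that no longer exist; the stored flow is
            // rewritten so it no longer references them.
            config.removeProperty("kerberos-principal");
            config.removeProperty("kerberos-password");
            config.removeProperty("kerberos-credentials-service");
        }
    }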
@@ -26,9 +26,6 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.UUID;
 import org.apache.nifi.dbcp.utils.DBCPProperties;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
-import org.apache.nifi.kerberos.KerberosUserService;
-import org.apache.nifi.kerberos.MockKerberosCredentialsService;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.NoOpProcessor;
@@ -46,8 +43,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 public class DBCPServiceTest {
     private static final String SERVICE_ID = DBCPConnectionPool.class.getName();
@@ -108,38 +103,6 @@ public class DBCPServiceTest {
         runner.assertNotValid(service);
     }
 
-    @Test
-    public void testCustomValidateOfKerberosProperties() throws InitializationException {
-        // direct principal + password and no kerberos services is valid
-        runner.setProperty(service, DBCPConnectionPool.KERBEROS_PRINCIPAL, "foo@FOO.COM");
-        runner.setProperty(service, DBCPConnectionPool.KERBEROS_PASSWORD, "fooPassword");
-        runner.assertValid(service);
-
-        // direct principal + password with kerberos credential service is invalid
-        final KerberosCredentialsService kerberosCredentialsService = enabledKerberosCredentialsService(runner);
-        runner.setProperty(service, DBCPConnectionPool.KERBEROS_CREDENTIALS_SERVICE, kerberosCredentialsService.getIdentifier());
-        runner.assertNotValid(service);
-
-        // kerberos credential service by itself is valid
-        runner.removeProperty(service, DBCPConnectionPool.KERBEROS_PRINCIPAL);
-        runner.removeProperty(service, DBCPConnectionPool.KERBEROS_PASSWORD);
-        runner.assertValid(service);
-
-        // kerberos credential service with kerberos user service is invalid
-        final KerberosUserService kerberosUserService = enableKerberosUserService(runner);
-        runner.setProperty(service, DBCPProperties.KERBEROS_USER_SERVICE, kerberosUserService.getIdentifier());
-        runner.assertNotValid(service);
-
-        // kerberos user service by itself is valid
-        runner.removeProperty(service, DBCPConnectionPool.KERBEROS_CREDENTIALS_SERVICE);
-        runner.assertValid(service);
-
-        // kerberos user service with direct principal + password is invalid
-        runner.setProperty(service, DBCPConnectionPool.KERBEROS_PRINCIPAL, "foo@FOO.COM");
-        runner.setProperty(service, DBCPConnectionPool.KERBEROS_PASSWORD, "fooPassword");
-        runner.assertNotValid(service);
-    }
-
     @Test
     public void testNotValidWithNegativeMinIdleProperty() {
         runner.setProperty(service, DBCPProperties.MIN_IDLE, "-1");
@@ -194,31 +157,6 @@ public class DBCPServiceTest {
         }
     }
 
-    @Test
-    public void testGetConnectionKerberosLoginException() throws InitializationException {
-        final KerberosCredentialsService kerberosCredentialsService = new MockKerberosCredentialsService();
-        final String kerberosServiceId = "kcs";
-        runner.addControllerService(kerberosServiceId, kerberosCredentialsService);
-        runner.setProperty(kerberosCredentialsService, MockKerberosCredentialsService.PRINCIPAL, "bad@PRINCIPAL.COM");
-        runner.setProperty(kerberosCredentialsService, MockKerberosCredentialsService.KEYTAB, "src/test/resources/fake.keytab");
-        runner.enableControllerService(kerberosCredentialsService);
-
-        // set fake Derby database connection url
-        runner.setProperty(service, DBCPProperties.DATABASE_URL, "jdbc:derby://localhost:1527/NoDB");
-        // Use the client driver here rather than the embedded one, as it will generate a ConnectException for the test
-        runner.setProperty(service, DBCPProperties.DB_DRIVERNAME, "org.apache.derby.jdbc.ClientDriver");
-        runner.setProperty(service, DBCPConnectionPool.KERBEROS_CREDENTIALS_SERVICE, kerberosServiceId);
-
-        try {
-            runner.enableControllerService(service);
-        } catch (AssertionError ae) {
-            // Ignore, this happens because it tries to do the initial Kerberos login
-        }
-
-        runner.assertValid(service);
-        assertThrows(ProcessException.class, service::getConnection);
-    }
-
     @Test
     public void testGetConnection() throws SQLException {
         runner.setProperty(service, DBCPProperties.MAX_TOTAL_CONNECTIONS, "2");
@@ -278,25 +216,6 @@ public class DBCPServiceTest {
         }
     }
 
-    private KerberosUserService enableKerberosUserService(final TestRunner runner) throws InitializationException {
-        final KerberosUserService kerberosUserService = mock(KerberosUserService.class);
-        when(kerberosUserService.getIdentifier()).thenReturn("userService1");
-        runner.addControllerService(kerberosUserService.getIdentifier(), kerberosUserService);
-        runner.enableControllerService(kerberosUserService);
-        return kerberosUserService;
-    }
-
-    private KerberosCredentialsService enabledKerberosCredentialsService(final TestRunner runner) throws InitializationException {
-        final KerberosCredentialsService credentialsService = mock(KerberosCredentialsService.class);
-        when(credentialsService.getIdentifier()).thenReturn("credsService1");
-        when(credentialsService.getPrincipal()).thenReturn("principal1");
-        when(credentialsService.getKeytab()).thenReturn("keytab1");
-
-        runner.addControllerService(credentialsService.getIdentifier(), credentialsService);
-        runner.enableControllerService(credentialsService);
-        return credentialsService;
-    }
-
     private File getEmptyDirectory() {
         final String randomDirectory = String.format("%s-%s", getClass().getSimpleName(), UUID.randomUUID());
         return Paths.get(getSystemTemporaryDirectory(), randomDirectory).toFile();
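With MockKerberosCredentialsService and the credentials-service helpers deleted, KerberosUserService is the remaining Kerberos path these tests can exercise. For reference, a sketch of an equivalent helper for the surviving service, mirroring the removed enableKerberosUserService above (the "userService1" identifier is arbitrary):

    import org.apache.nifi.kerberos.KerberosUserService;
    import org.apache.nifi.reporting.InitializationException;
    import org.apache.nifi.util.TestRunner;

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    static KerberosUserService enableKerberosUserService(final TestRunner runner) throws InitializationException {
        // Register a mocked KerberosUserService with the test runner and enable it.
        final KerberosUserService userService = mock(KerberosUserService.class);
        when(userService.getIdentifier()).thenReturn("userService1");
        runner.addControllerService(userService.getIdentifier(), userService);
        runner.enableControllerService(userService);
        return userService;
    }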
@@ -65,9 +65,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
-import static org.apache.nifi.dbcp.DBCPConnectionPool.KERBEROS_CREDENTIALS_SERVICE;
-import static org.apache.nifi.dbcp.DBCPConnectionPool.KERBEROS_PASSWORD;
-import static org.apache.nifi.dbcp.DBCPConnectionPool.KERBEROS_PRINCIPAL;
 import static org.apache.nifi.dbcp.utils.DBCPProperties.DATABASE_URL;
 import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_DRIVERNAME;
 import static org.apache.nifi.dbcp.utils.DBCPProperties.DB_DRIVER_LOCATION;
@@ -336,10 +333,7 @@ public class DatabaseRecordSinkTest {
         when(dbContext.getProperty(EVICTION_RUN_PERIOD)).thenReturn(new MockPropertyValue("5 sec"));
         when(dbContext.getProperty(MIN_EVICTABLE_IDLE_TIME)).thenReturn(new MockPropertyValue("5 sec"));
         when(dbContext.getProperty(SOFT_MIN_EVICTABLE_IDLE_TIME)).thenReturn(new MockPropertyValue("5 sec"));
-        when(dbContext.getProperty(KERBEROS_CREDENTIALS_SERVICE)).thenReturn(new MockPropertyValue(null));
         when(dbContext.getProperty(KERBEROS_USER_SERVICE)).thenReturn(new MockPropertyValue(null));
-        when(dbContext.getProperty(KERBEROS_PRINCIPAL)).thenReturn(new MockPropertyValue(null));
-        when(dbContext.getProperty(KERBEROS_PASSWORD)).thenReturn(new MockPropertyValue(null));
 
         final ControllerServiceInitializationContext dbInitContext = new MockControllerServiceInitializationContext(dbcpService, UUID.randomUUID().toString(), logger, dbStateManager);
         dbcpService.initialize(dbInitContext);
@@ -45,10 +45,6 @@
         <artifactId>nifi-hadoop-utils</artifactId>
         <version>2.0.0-SNAPSHOT</version>
     </dependency>
-    <dependency>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-    </dependency>
     <dependency>
         <groupId>org.apache.nifi</groupId>
         <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -20,7 +20,6 @@ import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.RequiresInstanceClassLoading;
 import org.apache.nifi.annotation.behavior.Restricted;
@@ -40,21 +39,11 @@ import org.apache.nifi.controller.ControllerServiceInitializationContext;
 import org.apache.nifi.dbcp.utils.DBCPProperties;
 import org.apache.nifi.dbcp.utils.DataSourceConfiguration;
 import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.hadoop.SecurityUtil;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
-import org.apache.nifi.kerberos.KerberosUserService;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.security.krb.KerberosKeytabUser;
-import org.apache.nifi.security.krb.KerberosLoginException;
-import org.apache.nifi.security.krb.KerberosPasswordUser;
 
 import javax.security.auth.login.LoginException;
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.UndeclaredThrowableException;
-import java.security.PrivilegedExceptionAction;
 import java.sql.Connection;
 import java.sql.Driver;
 import java.sql.DriverManager;
@@ -103,8 +92,6 @@ import static org.apache.nifi.dbcp.utils.DBCPProperties.extractMillisWithInfinit
 )
 public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
 
-    private static final String ALLOW_EXPLICIT_KEYTAB = "NIFI_ALLOW_EXPLICIT_KEYTAB";
-
     private static final String HADOOP_CONFIGURATION_CLASS = "org.apache.hadoop.conf.Configuration";
     private static final String HADOOP_UGI_CLASS = "org.apache.hadoop.security.UserGroupInformation";
 
@@ -128,18 +115,8 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
         .dynamicallyModifiesClasspath(true)
         .build();
 
-    public static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
-        .name("kerberos-credentials-service")
-        .displayName("Kerberos Credentials Service")
-        .description("Specifies the Kerberos Credentials Controller Service that should be used for authenticating with Kerberos")
-        .identifiesControllerService(KerberosCredentialsService.class)
-        .required(false)
-        .build();
-
-    private KerberosProperties kerberosProperties;
     private List<PropertyDescriptor> properties;
 
-    private volatile UserGroupInformation ugi;
     private volatile Boolean foundHadoopDependencies;
 
     // Holder of cached Configuration information so validation does not reload the same config over and over
@@ -147,19 +124,12 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
 
     @Override
     protected void init(final ControllerServiceInitializationContext context) {
-        File kerberosConfigFile = context.getKerberosConfigurationFile();
-        kerberosProperties = getKerberosProperties(kerberosConfigFile);
-
         properties = Arrays.asList(
             DATABASE_URL,
             DB_DRIVERNAME,
             DB_DRIVER_LOCATION,
             HADOOP_CONFIGURATION_RESOURCES,
             KERBEROS_USER_SERVICE,
-            KERBEROS_CREDENTIALS_SERVICE,
-            kerberosProperties.getKerberosPrincipal(),
-            kerberosProperties.getKerberosKeytab(),
-            kerberosProperties.getKerberosPassword(),
             DB_USER,
             DB_PASSWORD,
             MAX_WAIT_TIME,
@@ -174,8 +144,12 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
         );
     }
 
-    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-        return new KerberosProperties(kerberosConfigFile);
+    @Override
+    public void migrateProperties(final PropertyConfiguration config) {
+        config.removeProperty("Kerberos Principal");
+        config.removeProperty("Kerberos Password");
+        config.removeProperty("Kerberos Keytab");
+        config.removeProperty("kerberos-credentials-service");
     }
 
     @Override
@@ -220,24 +194,6 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
             return problems;
         }
 
-        // Hadoop classes were found, so proceed with the rest of validation...
-
-        final String explicitPrincipal = validationContext.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
-        final String explicitKeytab = validationContext.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
-        final String explicitPassword = validationContext.getProperty(kerberosProperties.getKerberosPassword()).getValue();
-        final KerberosCredentialsService credentialsService = validationContext.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-        final KerberosUserService kerberosUserService = validationContext.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
-
-        final String resolvedPrincipal;
-        final String resolvedKeytab;
-        if (credentialsService == null) {
-            resolvedPrincipal = explicitPrincipal;
-            resolvedKeytab = explicitKeytab;
-        } else {
-            resolvedPrincipal = credentialsService.getPrincipal();
-            resolvedKeytab = credentialsService.getKeytab();
-        }
-
         final boolean confFileProvided = validationContext.getProperty(HADOOP_CONFIGURATION_RESOURCES).isSet();
         if (confFileProvided) {
             final String configFiles = validationContext.getProperty(HADOOP_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
@@ -250,50 +206,6 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
                 resources = new ValidationResources(configFiles, getConfigurationFromFiles(configFiles));
                 validationResourceHolder.set(resources);
             }
-
-            final Configuration hadoopConfig = resources.getConfiguration();
-            if (kerberosUserService == null) {
-                problems.addAll(KerberosProperties.validatePrincipalWithKeytabOrPassword(getClass().getSimpleName(), hadoopConfig,
-                    resolvedPrincipal, resolvedKeytab, explicitPassword, getLogger()));
-            } else {
-                final boolean securityEnabled = SecurityUtil.isSecurityEnabled(hadoopConfig);
-                if (!securityEnabled) {
-                    getLogger().warn("Hadoop Configuration does not have security enabled, KerberosUserService will be ignored");
-                }
-            }
         }
-
-        if (credentialsService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
-            problems.add(new ValidationResult.Builder()
-                .subject("Kerberos Credentials")
-                .valid(false)
-                .explanation("Cannot specify a Kerberos Credentials Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
-                .build());
-        }
-
-        if (kerberosUserService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
-            problems.add(new ValidationResult.Builder()
-                .subject("Kerberos User")
-                .valid(false)
-                .explanation("Cannot specify a Kerberos User Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
-                .build());
-        }
-
-        if (kerberosUserService != null && credentialsService != null) {
-            problems.add(new ValidationResult.Builder()
-                .subject("Kerberos User")
-                .valid(false)
-                .explanation("Cannot specify a Kerberos User Service while also specifying a Kerberos Credentials Service")
-                .build());
-        }
-
-        if (!isAllowExplicitKeytab() && explicitKeytab != null) {
-            problems.add(new ValidationResult.Builder()
-                .subject("Kerberos Credentials")
-                .valid(false)
-                .explanation("The '" + ALLOW_EXPLICIT_KEYTAB + "' system environment variable is configured to forbid explicitly configuring Kerberos Keytab in processors. "
-                    + "The Kerberos Credentials Service should be used instead of setting the Kerberos Keytab or Kerberos Principal property.")
-                .build());
-        }
 
         return problems;
@@ -321,7 +233,7 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
      * @param context the configuration context
      */
     @OnEnabled
-    public void onEnabled(final ConfigurationContext context) throws IOException {
|
public void onEnabled(final ConfigurationContext context) {
|
||||||
// Get Configuration instance from specified resources
|
// Get Configuration instance from specified resources
|
||||||
final String configFiles = context.getProperty(HADOOP_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
|
final String configFiles = context.getProperty(HADOOP_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
|
||||||
final Configuration hadoopConfig = getConfigurationFromFiles(configFiles);
|
final Configuration hadoopConfig = getConfigurationFromFiles(configFiles);
|
||||||
@ -333,39 +245,6 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
|
|||||||
hadoopConfig.set(descriptor.getName(), context.getProperty(descriptor).evaluateAttributeExpressions().getValue());
|
hadoopConfig.set(descriptor.getName(), context.getProperty(descriptor).evaluateAttributeExpressions().getValue());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// If security is enabled then determine how to authenticate based on the various principal/keytab/password options
|
|
||||||
if (SecurityUtil.isSecurityEnabled(hadoopConfig)) {
|
|
||||||
final String explicitPrincipal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
|
|
||||||
final String explicitKeytab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
|
|
||||||
final String explicitPassword = context.getProperty(kerberosProperties.getKerberosPassword()).getValue();
|
|
||||||
final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
|
|
||||||
|
|
||||||
final String resolvedPrincipal;
|
|
||||||
final String resolvedKeytab;
|
|
||||||
if (credentialsService != null) {
|
|
||||||
resolvedPrincipal = credentialsService.getPrincipal();
|
|
||||||
resolvedKeytab = credentialsService.getKeytab();
|
|
||||||
} else {
|
|
||||||
resolvedPrincipal = explicitPrincipal;
|
|
||||||
resolvedKeytab = explicitKeytab;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (resolvedKeytab != null) {
|
|
||||||
kerberosUser = new KerberosKeytabUser(resolvedPrincipal, resolvedKeytab);
|
|
||||||
getLogger().info("Security Enabled, logging in as principal {} with keytab {}", resolvedPrincipal, resolvedKeytab);
|
|
||||||
} else if (explicitPassword != null) {
|
|
||||||
kerberosUser = new KerberosPasswordUser(resolvedPrincipal, explicitPassword);
|
|
||||||
getLogger().info("Security Enabled, logging in as principal {} with password", resolvedPrincipal);
|
|
||||||
} else {
|
|
||||||
throw new IOException("Unable to authenticate with Kerberos, no keytab or password was provided");
|
|
||||||
}
|
|
||||||
|
|
||||||
ugi = SecurityUtil.getUgiForKerberosUser(hadoopConfig, kerberosUser);
|
|
||||||
getLogger().info("Successfully logged in as principal {}", resolvedPrincipal);
|
|
||||||
} else {
|
|
||||||
getLogger().info("Simple Authentication");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -379,22 +258,15 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
|
|||||||
*/
|
*/
|
||||||
@OnDisabled
|
@OnDisabled
|
||||||
public void shutdown() throws SQLException {
|
public void shutdown() throws SQLException {
|
||||||
|
validationResourceHolder.set(null);
|
||||||
|
foundHadoopDependencies = null;
|
||||||
|
kerberosUser = null;
|
||||||
try {
|
try {
|
||||||
if (kerberosUser != null) {
|
if (dataSource != null) {
|
||||||
kerberosUser.logout();
|
dataSource.close();
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
validationResourceHolder.set(null);
|
dataSource = null;
|
||||||
foundHadoopDependencies = null;
|
|
||||||
kerberosUser = null;
|
|
||||||
ugi = null;
|
|
||||||
try {
|
|
||||||
if (dataSource != null) {
|
|
||||||
dataSource.close();
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
dataSource = null;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -456,40 +328,8 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
|
|||||||
@Override
|
@Override
|
||||||
public Connection getConnection() throws ProcessException {
|
public Connection getConnection() throws ProcessException {
|
||||||
try {
|
try {
|
||||||
if (ugi != null) {
|
return dataSource.getConnection();
|
||||||
// Explicitly check the TGT and relogin if necessary with the KerberosUser instance. No synchronization
|
} catch (SQLException e) {
|
||||||
// is necessary in the client code, since AbstractKerberosUser's checkTGTAndRelogin method is synchronized.
|
|
||||||
getLogger().trace("getting UGI instance");
|
|
||||||
if (kerberosUser != null) {
|
|
||||||
// if there's a KerberosUser associated with this UGI, check the TGT and relogin if it is close to expiring
|
|
||||||
getLogger().debug("kerberosUser is {}", kerberosUser);
|
|
||||||
try {
|
|
||||||
getLogger().debug("checking TGT on kerberosUser {}", kerberosUser);
|
|
||||||
kerberosUser.checkTGTAndRelogin();
|
|
||||||
} catch (final KerberosLoginException e) {
|
|
||||||
throw new ProcessException("Unable to relogin with kerberos credentials for " + kerberosUser.getPrincipal(), e);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
getLogger().debug("kerberosUser was null, will not refresh TGT with KerberosUser");
|
|
||||||
// no synchronization is needed for UserGroupInformation.checkTGTAndReloginFromKeytab; UGI handles the synchronization internally
|
|
||||||
ugi.checkTGTAndReloginFromKeytab();
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
return ugi.doAs((PrivilegedExceptionAction<Connection>) () -> dataSource.getConnection());
|
|
||||||
} catch (UndeclaredThrowableException e) {
|
|
||||||
Throwable cause = e.getCause();
|
|
||||||
if (cause instanceof SQLException) {
|
|
||||||
throw (SQLException) cause;
|
|
||||||
} else {
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
getLogger().info("Simple Authentication");
|
|
||||||
return dataSource.getConnection();
|
|
||||||
}
|
|
||||||
} catch (SQLException | IOException | InterruptedException e) {
|
|
||||||
throw new ProcessException(e);
|
throw new ProcessException(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -498,12 +338,4 @@ public class HadoopDBCPConnectionPool extends AbstractDBCPConnectionPool {
|
|||||||
public String toString() {
|
public String toString() {
|
||||||
return "HadoopDBCPConnectionPool[id=" + getIdentifier() + "]";
|
return "HadoopDBCPConnectionPool[id=" + getIdentifier() + "]";
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
* Overridable by subclasses in the same package, mainly intended for testing purposes to allow verification without having to set environment variables.
|
|
||||||
*/
|
|
||||||
boolean isAllowExplicitKeytab() {
|
|
||||||
return Boolean.parseBoolean(System.getenv(ALLOW_EXPLICIT_KEYTAB));
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
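
The migrateProperties override added above is the load-time hook that NiFi 2.x components use to reconcile flows saved under older versions. A minimal sketch of the pattern, assuming only the PropertyConfiguration API visible in this commit; the ExampleLegacyAwareService class itself is hypothetical:

import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.migration.PropertyConfiguration;

// Hypothetical component illustrating the property-migration hook used above.
public class ExampleLegacyAwareService extends AbstractControllerService {

    @Override
    public void migrateProperties(final PropertyConfiguration config) {
        // Invoked when an existing flow is loaded: dropping the retired Kerberos
        // properties here keeps old flows loadable after the upgrade, discarding
        // any values that were stored under these names.
        config.removeProperty("Kerberos Principal");
        config.removeProperty("Kerberos Password");
        config.removeProperty("Kerberos Keytab");
        config.removeProperty("kerberos-credentials-service");
    }
}
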
@@ -1,185 +0,0 @@ (deleted file: HadoopDBCPConnectionPoolTest, package org.apache.nifi.dbcp)
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.dbcp;
-
-import org.apache.nifi.controller.AbstractControllerService;
-import org.apache.nifi.dbcp.utils.DBCPProperties;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.kerberos.KerberosContext;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
-import org.apache.nifi.kerberos.KerberosUserService;
-import org.apache.nifi.processor.Processor;
-import org.apache.nifi.reporting.InitializationException;
-import org.apache.nifi.util.MockKerberosContext;
-import org.apache.nifi.util.TestRunner;
-import org.apache.nifi.util.TestRunners;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import java.io.File;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-public class HadoopDBCPConnectionPoolTest {
-
-    private File krbConfFile;
-    private KerberosProperties kerberosProps;
-    private KerberosContext kerberosContext;
-
-    @BeforeEach
-    public void setup() {
-        krbConfFile = new File("src/test/resources/krb5.conf");
-        kerberosProps = new KerberosProperties(krbConfFile);
-        kerberosContext = new MockKerberosContext(krbConfFile);
-    }
-
-    @Test
-    public void testCustomValidateWhenAllowExplicitKeytab() throws InitializationException {
-        final Processor testProcessor = new TestProcessor();
-        final TestRunner runner = TestRunners.newTestRunner(testProcessor, kerberosContext);
-
-        // Configure minimum required properties..
-        final HadoopDBCPConnectionPool hadoopDBCPService = new TestableHadoopDBCPConnectionPool(true);
-        runner.addControllerService("hadoop-dbcp-service", hadoopDBCPService);
-        runner.setProperty(hadoopDBCPService, DBCPProperties.DATABASE_URL, "jdbc:phoenix:zk-host1,zk-host2:2181:/hbase");
-        runner.setProperty(hadoopDBCPService, DBCPProperties.DB_DRIVERNAME, "org.apache.phoenix.jdbc.PhoenixDriver");
-        runner.setProperty(hadoopDBCPService, DBCPProperties.DB_DRIVER_LOCATION, "target");
-
-        // Security is not enabled yet since no conf files provided, so should be valid
-        runner.assertValid(hadoopDBCPService);
-
-        // Enable security, should be invalid until some form of kerberos credentials are provided
-        runner.setProperty(hadoopDBCPService, HadoopDBCPConnectionPool.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site-security.xml");
-        runner.assertNotValid(hadoopDBCPService);
-
-        // Configure principal and keytab, should be valid
-        runner.setProperty(hadoopDBCPService, kerberosProps.getKerberosPrincipal(), "nifi@EXAMPLE.COM");
-        runner.setProperty(hadoopDBCPService, kerberosProps.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.assertValid(hadoopDBCPService);
-
-        // Configure password, should become invalid
-        runner.setProperty(hadoopDBCPService, kerberosProps.getKerberosPassword(), "password");
-        runner.assertNotValid(hadoopDBCPService);
-
-        // Remove keytab property, should become valid
-        runner.removeProperty(hadoopDBCPService, kerberosProps.getKerberosKeytab());
-        runner.assertValid(hadoopDBCPService);
-
-        // Configure a KerberosCredentialService, should become invalid
-        final KerberosCredentialsService kerberosCredentialsService = new MockKerberosCredentialsService(
-                "nifi@EXAMPLE.COM", "src/test/resources/fake.keytab");
-        runner.addControllerService("kerb-credentials", kerberosCredentialsService);
-        runner.enableControllerService(kerberosCredentialsService);
-        runner.setProperty(hadoopDBCPService, HadoopDBCPConnectionPool.KERBEROS_CREDENTIALS_SERVICE, "kerb-credentials");
-        runner.assertNotValid(hadoopDBCPService);
-
-        // Remove password property, still invalid
-        runner.removeProperty(hadoopDBCPService, kerberosProps.getKerberosPassword());
-        runner.assertNotValid(hadoopDBCPService);
-
-        // Remove principal property, only using keytab service, should become valid
-        runner.removeProperty(hadoopDBCPService, kerberosProps.getKerberosPrincipal());
-        runner.assertValid(hadoopDBCPService);
-
-        // Configure KerberosUserService, should be invalid since KerberosCredentialService also configured
-        final KerberosUserService kerberosUserService = mock(KerberosUserService.class);
-        when(kerberosUserService.getIdentifier()).thenReturn("userService1");
-        runner.addControllerService(kerberosUserService.getIdentifier(), kerberosUserService);
-        runner.enableControllerService(kerberosUserService);
-        runner.setProperty(hadoopDBCPService, DBCPProperties.KERBEROS_USER_SERVICE, kerberosUserService.getIdentifier());
-        runner.assertNotValid(hadoopDBCPService);
-
-        // Remove KerberosCredentialService, should be valid with only KerberosUserService
-        runner.removeProperty(hadoopDBCPService, HadoopDBCPConnectionPool.KERBEROS_CREDENTIALS_SERVICE);
-        runner.assertValid(hadoopDBCPService);
-
-        // Configure explicit principal and keytab, should be invalid while kerberos user service is set
-        runner.setProperty(hadoopDBCPService, kerberosProps.getKerberosPrincipal(), "nifi@EXAMPLE.COM");
-        runner.setProperty(hadoopDBCPService, kerberosProps.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.assertNotValid(hadoopDBCPService);
-
-        // Remove explicit keytab, set explicit password, still invalid while kerberos user service set
-        runner.removeProperty(hadoopDBCPService, kerberosProps.getKerberosKeytab());
-        runner.setProperty(hadoopDBCPService, kerberosProps.getKerberosPassword(), "password");
-        runner.assertNotValid(hadoopDBCPService);
-
-        // Remove kerberos user service, should be valid
-        runner.removeProperty(hadoopDBCPService, DBCPProperties.KERBEROS_USER_SERVICE);
-        runner.assertValid(hadoopDBCPService);
-    }
-
-    @Test
-    public void testCustomValidateWhenNotAllowExplicitKeytab() throws InitializationException {
-        final Processor testProcessor = new TestProcessor();
-        final TestRunner runner = TestRunners.newTestRunner(testProcessor, kerberosContext);
-
-        // Configure minimum required properties..
-        final HadoopDBCPConnectionPool hadoopDBCPService = new TestableHadoopDBCPConnectionPool(false);
-        runner.addControllerService("hadoop-dbcp-service", hadoopDBCPService);
-        runner.setProperty(hadoopDBCPService, DBCPProperties.DATABASE_URL, "jdbc:phoenix:zk-host1,zk-host2:2181:/hbase");
-        runner.setProperty(hadoopDBCPService, DBCPProperties.DB_DRIVERNAME, "org.apache.phoenix.jdbc.PhoenixDriver");
-        runner.setProperty(hadoopDBCPService, HadoopDBCPConnectionPool.DB_DRIVER_LOCATION, "target");
-
-        // Security is not enabled yet since no conf files provided, so should be valid
-        runner.assertValid(hadoopDBCPService);
-
-        // Enable security, should be invalid until some form of kerberos credentials are provided
-        runner.setProperty(hadoopDBCPService, HadoopDBCPConnectionPool.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site-security.xml");
-        runner.assertNotValid(hadoopDBCPService);
-
-        // Configure principal and keytab, should be valid
-        runner.setProperty(hadoopDBCPService, kerberosProps.getKerberosPrincipal(), "nifi@EXAMPLE.COM");
-        runner.assertNotValid(hadoopDBCPService);
-    }
-
-    private static final class TestableHadoopDBCPConnectionPool extends HadoopDBCPConnectionPool {
-
-        private final boolean allowExplicitKeytab;
-
-        public TestableHadoopDBCPConnectionPool(boolean allowExplicitKeytab) {
-            this.allowExplicitKeytab = allowExplicitKeytab;
-        }
-
-        @Override
-        boolean isAllowExplicitKeytab() {
-            return allowExplicitKeytab;
-        }
-    }
-
-    private class MockKerberosCredentialsService extends AbstractControllerService implements KerberosCredentialsService {
-
-        private String principal;
-        private String keytab;
-
-        public MockKerberosCredentialsService(String principal, String keytab) {
-            this.principal = principal;
-            this.keytab = keytab;
-        }
-
-        @Override
-        public String getKeytab() {
-            return keytab;
-        }
-
-        @Override
-        public String getPrincipal() {
-            return principal;
-        }
-    }
-
-}
@@ -1,45 +0,0 @@ (deleted file: TestProcessor, package org.apache.nifi.dbcp)
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.dbcp;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.exception.ProcessException;
-
-public class TestProcessor extends AbstractProcessor {
-
-    @Override
-    public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
-    }
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        List<PropertyDescriptor> propDescs = new ArrayList<>();
-        propDescs.add(new PropertyDescriptor.Builder()
-                .name("DBCPService test processor")
-                .description("DBCPService test processor")
-                .identifiesControllerService(DBCPService.class)
-                .required(true)
-                .build());
-        return propDescs;
-    }
-}
@@ -1,30 +0,0 @@ (deleted test resource: the Kerberos-enabled core-site configuration, likely core-site-security.xml)
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License. You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<configuration>
-    <property>
-        <name>fs.default.name</name>
-        <value>hdfs://hbase</value>
-    </property>
-    <property>
-        <name>hadoop.security.authentication</name>
-        <value>kerberos</value>
-    </property>
-    <property>
-        <name>hadoop.security.authorization</name>
-        <value>true</value>
-    </property>
-</configuration>
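
The resource deleted above is what flipped the test Hadoop configuration into secure mode: Hadoop treats hadoop.security.authentication=kerberos as the switch that SecurityUtil.isSecurityEnabled(...) inspects in the hunks above. A standalone sketch of that check, assuming only stock Hadoop Configuration APIs; the resource path is illustrative, since this commit removes the file:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class SecurityModeSketch {
    public static void main(final String[] args) {
        // Load the configuration without Hadoop's bundled defaults.
        final Configuration conf = new Configuration(false);
        conf.addResource(new Path("src/test/resources/core-site-security.xml"));
        // "kerberos" is the value that enabled security in the deleted resource;
        // Hadoop's default is "simple" authentication.
        final String auth = conf.get("hadoop.security.authentication", "simple");
        System.out.println("security enabled: " + "kerberos".equalsIgnoreCase(auth));
    }
}
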
@@ -1,22 +0,0 @@ (deleted test resource: the plain core-site configuration without security settings)
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License. You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<configuration>
-    <property>
-        <name>fs.default.name</name>
-        <value>hdfs://hbase</value>
-    </property>
-</configuration>
@@ -1,12 +0,0 @@ (deleted test resource: likely src/test/resources/krb5.conf)
-[libdefaults]
-  default_realm = EXAMPLE.COM
-
-[realms]
-  EXAMPLE.COM = {
-    kdc = kdc1.example.com
-    kdc = kdc2.example.com
-    admin_server = kdc1.example.com
-  }
-
-[domain_realm]
-  .example.com = EXAMPLE.COM
@@ -53,10 +53,6 @@ (module pom.xml)
         <groupId>org.apache.nifi</groupId>
         <artifactId>nifi-record</artifactId>
     </dependency>
-    <dependency>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-    </dependency>
     <dependency>
         <groupId>org.apache.nifi</groupId>
         <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.hbase;

-import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
@@ -75,7 +74,6 @@ import org.apache.nifi.controller.AbstractControllerService;
 import org.apache.nifi.controller.ConfigurationContext;
 import org.apache.nifi.controller.ControllerServiceInitializationContext;
 import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.hadoop.SecurityUtil;
 import org.apache.nifi.hbase.put.PutColumn;
 import org.apache.nifi.hbase.put.PutFlowFile;
@@ -83,12 +81,9 @@ import org.apache.nifi.hbase.scan.Column;
 import org.apache.nifi.hbase.scan.HBaseRegion;
 import org.apache.nifi.hbase.scan.ResultCell;
 import org.apache.nifi.hbase.scan.ResultHandler;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
 import org.apache.nifi.kerberos.KerberosUserService;
+import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.reporting.InitializationException;
-import org.apache.nifi.security.krb.KerberosKeytabUser;
-import org.apache.nifi.security.krb.KerberosPasswordUser;
 import org.apache.nifi.security.krb.KerberosUser;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -111,18 +106,8 @@ import org.slf4j.LoggerFactory;
     }
 )
 public class HBase_2_ClientService extends AbstractControllerService implements HBaseClientService {
-    private static final String ALLOW_EXPLICIT_KEYTAB = "NIFI_ALLOW_EXPLICIT_KEYTAB";
-
     private static final Logger logger = LoggerFactory.getLogger(HBase_2_ClientService.class);

-    static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
-            .name("kerberos-credentials-service")
-            .displayName("Kerberos Credentials Service")
-            .description("Specifies the Kerberos Credentials Controller Service that should be used for authenticating with Kerberos")
-            .identifiesControllerService(KerberosCredentialsService.class)
-            .required(false)
-            .build();
-
     static final PropertyDescriptor KERBEROS_USER_SERVICE = new PropertyDescriptor.Builder()
             .name("kerberos-user-service")
             .displayName("Kerberos User Service")
@@ -189,25 +174,16 @@ public class HBase_2_ClientService extends AbstractControllerService implements
     private volatile String masterAddress;

     private List<PropertyDescriptor> properties;
-    private KerberosProperties kerberosProperties;
-    private volatile File kerberosConfigFile = null;

     // Holder of cached Configuration information so validation does not reload the same config over and over
     private final AtomicReference<ValidationResources> validationResourceHolder = new AtomicReference<>();


     @Override
-    protected void init(ControllerServiceInitializationContext config) throws InitializationException {
-        kerberosConfigFile = config.getKerberosConfigurationFile();
-        kerberosProperties = getKerberosProperties(kerberosConfigFile);
-
+    protected void init(ControllerServiceInitializationContext config) {
         List<PropertyDescriptor> props = new ArrayList<>();
         props.add(HADOOP_CONF_FILES);
         props.add(KERBEROS_USER_SERVICE);
-        props.add(KERBEROS_CREDENTIALS_SERVICE);
-        props.add(kerberosProperties.getKerberosPrincipal());
-        props.add(kerberosProperties.getKerberosKeytab());
-        props.add(kerberosProperties.getKerberosPassword());
         props.add(ZOOKEEPER_QUORUM);
         props.add(ZOOKEEPER_CLIENT_PORT);
         props.add(ZOOKEEPER_ZNODE_PARENT);
@@ -217,12 +193,16 @@ public class HBase_2_ClientService extends AbstractControllerService implements
         this.properties = Collections.unmodifiableList(props);
     }

-    protected List<PropertyDescriptor> getAdditionalProperties() {
-        return new ArrayList<>();
+    @Override
+    public void migrateProperties(final PropertyConfiguration config) {
+        config.removeProperty("Kerberos Principal");
+        config.removeProperty("Kerberos Password");
+        config.removeProperty("Kerberos Keytab");
+        config.removeProperty("kerberos-credentials-service");
     }

-    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-        return new KerberosProperties(kerberosConfigFile);
+    protected List<PropertyDescriptor> getAdditionalProperties() {
+        return new ArrayList<>();
     }

     @Override
@@ -248,22 +228,6 @@ public class HBase_2_ClientService extends AbstractControllerService implements
         boolean znodeParentProvided = validationContext.getProperty(ZOOKEEPER_ZNODE_PARENT).isSet();
         boolean retriesProvided = validationContext.getProperty(HBASE_CLIENT_RETRIES).isSet();

-        final String explicitPrincipal = validationContext.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
-        final String explicitKeytab = validationContext.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
-        final String explicitPassword = validationContext.getProperty(kerberosProperties.getKerberosPassword()).getValue();
-        final KerberosCredentialsService credentialsService = validationContext.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-        final KerberosUserService kerberosUserService = validationContext.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
-
-        final String resolvedPrincipal;
-        final String resolvedKeytab;
-        if (credentialsService == null) {
-            resolvedPrincipal = explicitPrincipal;
-            resolvedKeytab = explicitKeytab;
-        } else {
-            resolvedPrincipal = credentialsService.getPrincipal();
-            resolvedKeytab = credentialsService.getKeytab();
-        }
-
         final List<ValidationResult> problems = new ArrayList<>();

         if (!confFileProvided && (!zkQuorumProvided || !zkPortProvided || !znodeParentProvided || !retriesProvided)) {
@@ -286,50 +250,6 @@ public class HBase_2_ClientService extends AbstractControllerService implements
                 resources = new ValidationResources(configFiles, getConfigurationFromFiles(configFiles));
                 validationResourceHolder.set(resources);
             }
-
-            final Configuration hbaseConfig = resources.getConfiguration();
-            if (kerberosUserService == null) {
-                problems.addAll(KerberosProperties.validatePrincipalWithKeytabOrPassword(getClass().getSimpleName(), hbaseConfig,
-                        resolvedPrincipal, resolvedKeytab, explicitPassword, getLogger()));
-            } else {
-                final boolean securityEnabled = SecurityUtil.isSecurityEnabled(hbaseConfig);
-                if (!securityEnabled) {
-                    getLogger().warn("Hadoop Configuration does not have security enabled, KerberosUserService will be ignored");
-                }
-            }
-        }
-
-        if (credentialsService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
-            problems.add(new ValidationResult.Builder()
-                    .subject("Kerberos Credentials")
-                    .valid(false)
-                    .explanation("Cannot specify a Kerberos Credentials Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
-                    .build());
-        }
-
-        if (kerberosUserService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
-            problems.add(new ValidationResult.Builder()
-                    .subject("Kerberos User")
-                    .valid(false)
-                    .explanation("Cannot specify a Kerberos User Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
-                    .build());
-        }
-
-        if (kerberosUserService != null && credentialsService != null) {
-            problems.add(new ValidationResult.Builder()
-                    .subject("Kerberos User")
-                    .valid(false)
-                    .explanation("Cannot specify a Kerberos User Service while also specifying a Kerberos Credentials Service")
-                    .build());
-        }
-
-        if (!isAllowExplicitKeytab() && explicitKeytab != null) {
-            problems.add(new ValidationResult.Builder()
-                    .subject("Kerberos Credentials")
-                    .valid(false)
-                    .explanation("The '" + ALLOW_EXPLICIT_KEYTAB + "' system environment variable is configured to forbid explicitly configuring Kerberos Keytab in processors. "
-                            + "The Kerberos Credentials Service should be used instead of setting the Kerberos Keytab or Kerberos Principal property.")
-                    .build());
         }

         return problems;
@@ -408,24 +328,6 @@ public class HBase_2_ClientService extends AbstractControllerService implements
         final KerberosUserService kerberosUserService = context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
         if (kerberosUserService != null) {
             return kerberosUserService.createKerberosUser();
-        }
-
-        String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
-        String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
-        String password = context.getProperty(kerberosProperties.getKerberosPassword()).getValue();
-
-        // If the Kerberos Credentials Service is specified, we need to use its configuration, not the explicit properties for principal/keytab.
-        // The customValidate method ensures that only one can be set, so we know that the principal & keytab above are null.
-        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-        if (credentialsService != null) {
-            principal = credentialsService.getPrincipal();
-            keyTab = credentialsService.getKeytab();
-        }
-
-        if (keyTab != null) {
-            return new KerberosKeytabUser(principal, keyTab);
-        } else if (password != null) {
-            return new KerberosPasswordUser(principal, password);
         } else {
             throw new IllegalStateException("Unable to authenticate with Kerberos, no keytab or password was provided");
         }
@@ -974,13 +876,6 @@ public class HBase_2_ClientService extends AbstractControllerService implements
         return "hbase://" + transitUriMasterAddress + "/" + tableName + (StringUtils.isEmpty(rowKey) ? "" : "/" + rowKey);
     }
-
-    /*
-     * Overridable by subclasses in the same package, mainly intended for testing purposes to allow verification without having to set environment variables.
-     */
-    boolean isAllowExplicitKeytab() {
-        return Boolean.parseBoolean(System.getenv(ALLOW_EXPLICIT_KEYTAB));
-    }

     UserGroupInformation getUgi() throws IOException {
         getLogger().trace("getting UGI instance");
         // if there is a KerberosUser associated with UGI, call checkTGTAndRelogin to ensure UGI's underlying Subject has a valid ticket
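
With the credentials service and the explicit principal/keytab/password properties gone, the hunks above leave a single authentication path: ask the configured KerberosUserService for a KerberosUser. A hedged sketch of that flow, assuming the NiFi APIs visible in the kept lines; the KerberosUserResolver helper itself is hypothetical:

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.kerberos.KerberosUserService;
import org.apache.nifi.security.krb.KerberosUser;

// Hypothetical helper mirroring the kept lines of createKerberosUser().
final class KerberosUserResolver {

    private KerberosUserResolver() {
    }

    static KerberosUser resolve(final ConfigurationContext context, final PropertyDescriptor kerberosUserServiceProperty) {
        final KerberosUserService service = context.getProperty(kerberosUserServiceProperty)
                .asControllerService(KerberosUserService.class);
        if (service != null) {
            return service.createKerberosUser();
        }
        // Same failure mode the service keeps for a security-enabled
        // configuration with no Kerberos User Service configured.
        throw new IllegalStateException("Unable to authenticate with Kerberos, no keytab or password was provided");
    }
}
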
@@ -25,13 +25,11 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.hbase.put.PutColumn;
 import org.apache.nifi.hbase.put.PutFlowFile;
 import org.apache.nifi.hbase.scan.Column;
 import org.mockito.Mockito;

-import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
@@ -55,8 +53,6 @@ public class MockHBaseClientService extends HBase_2_ClientService {
     private Table table;
     private String family;
     private Map<String, Result> results = new HashMap<>();
-    private KerberosProperties kerberosProperties;
-    private boolean allowExplicitKeytab;
     private UserGroupInformation mockUgi;

     {
@@ -71,25 +67,9 @@ public class MockHBaseClientService extends HBase_2_ClientService {
         }
     }

-    public MockHBaseClientService(final Table table, final String family, final KerberosProperties kerberosProperties) {
-        this(table, family, kerberosProperties, false);
-    }
-
-    public MockHBaseClientService(final Table table, final String family, final KerberosProperties kerberosProperties, boolean allowExplicitKeytab) {
+    public MockHBaseClientService(final Table table, final String family) {
         this.table = table;
         this.family = family;
-        this.kerberosProperties = kerberosProperties;
-        this.allowExplicitKeytab = allowExplicitKeytab;
-    }
-
-    @Override
-    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-        return kerberosProperties;
-    }
-
-    protected void setKerberosProperties(KerberosProperties properties) {
-        this.kerberosProperties = properties;
-
     }

     public void addResult(final String rowKey, final Map<String, String> cells, final long timestamp) {
@@ -224,11 +204,6 @@ public class MockHBaseClientService extends HBase_2_ClientService {
         return connection;
     }
-
-    @Override
-    boolean isAllowExplicitKeytab() {
-        return allowExplicitKeytab;
-    }

     @Override
     UserGroupInformation getUgi() throws IOException {
         return mockUgi;
@@ -27,7 +27,6 @@ import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
 import org.apache.nifi.distributed.cache.client.Serializer;
 import org.apache.nifi.distributed.cache.client.exception.DeserializationException;
 import org.apache.nifi.distributed.cache.client.exception.SerializationException;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.hbase.scan.ResultCell;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.TestRunner;
@@ -37,7 +36,6 @@ import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;

-import java.io.File;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
@@ -58,9 +56,6 @@ import static org.mockito.Mockito.when;

 public class TestHBase_2_ClientMapCacheService {

-    private KerberosProperties kerberosPropsWithFile;
-    private KerberosProperties kerberosPropsWithoutFile;
-
     private Serializer<String> stringSerializer = new StringSerializer();
     private Deserializer<String> stringDeserializer = new StringDeserializer();

@@ -70,10 +65,6 @@ public class TestHBase_2_ClientMapCacheService {
         // config with Kerberos authentication enabled
         System.setProperty("java.security.krb5.realm", "nifi.com");
         System.setProperty("java.security.krb5.kdc", "nifi.kdc");
-
-        kerberosPropsWithFile = new KerberosProperties(new File("src/test/resources/krb5.conf"));
-
-        kerberosPropsWithoutFile = new KerberosProperties(null);
     }

     private final String tableName = "nifi";
@@ -391,7 +382,7 @@ public class TestHBase_2_ClientMapCacheService {


     private MockHBaseClientService configureHBaseClientService(final TestRunner runner, final Table table) throws InitializationException {
-        final MockHBaseClientService service = new MockHBaseClientService(table, "family1", kerberosPropsWithFile);
+        final MockHBaseClientService service = new MockHBaseClientService(table, "family1");
         runner.addControllerService("hbaseClient", service);
         runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
         runner.enableControllerService(service);
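
The updated configureHBaseClientService(...) above shows the whole test wiring that survives this change: the mock service is constructed without KerberosProperties and validated against only a Hadoop configuration file. A sketch of the same wiring as a reusable helper, assuming this module's MockHBaseClientService and a caller-supplied TestRunner; the HBaseClientWiringSketch class is hypothetical:

import org.apache.hadoop.hbase.client.Table;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunner;
import org.mockito.Mockito;

// Hypothetical helper mirroring the updated configureHBaseClientService(...).
public class HBaseClientWiringSketch {

    static MockHBaseClientService wire(final TestRunner runner) throws InitializationException {
        // The mock service no longer takes KerberosProperties or an
        // allow-explicit-keytab flag; a Table mock and column family suffice.
        final Table table = Mockito.mock(Table.class);
        final MockHBaseClientService service = new MockHBaseClientService(table, "family1");
        runner.addControllerService("hbaseClient", service);
        runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
        runner.enableControllerService(service);
        runner.assertValid(service);
        return service;
    }
}
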
@@ -20,14 +20,11 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.hbase.put.PutColumn;
 import org.apache.nifi.hbase.put.PutFlowFile;
 import org.apache.nifi.hbase.scan.Column;
 import org.apache.nifi.hbase.scan.ResultCell;
 import org.apache.nifi.hbase.scan.ResultHandler;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
-import org.apache.nifi.kerberos.KerberosUserService;
 import org.apache.nifi.reporting.InitializationException;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
@@ -36,7 +33,6 @@ import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;

-import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
@@ -51,7 +47,6 @@ import java.util.NavigableMap;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
@@ -60,202 +55,12 @@ public class TestHBase_2_ClientService {

     static final String COL_FAM = "nifi1";

-    private KerberosProperties kerberosPropsWithFile;
-    private KerberosProperties kerberosPropsWithoutFile;
-
     @BeforeEach
     public void setup() {
         // needed for calls to UserGroupInformation.setConfiguration() to work when passing in
         // config with Kerberos authentication enabled
         System.setProperty("java.security.krb5.realm", "nifi.com");
         System.setProperty("java.security.krb5.kdc", "nifi.kdc");
-
-        kerberosPropsWithFile = new KerberosProperties(new File("src/test/resources/krb5.conf"));
-
-        kerberosPropsWithoutFile = new KerberosProperties(null);
-    }
-
-    @Test
-    public void testCustomValidate() throws InitializationException {
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        final String tableName = "nifi";
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // no conf file or zk properties so should be invalid
-        MockHBaseClientService service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-
-        runner.assertNotValid(service);
-        runner.removeControllerService(service);
-
-        runner.setEnvironmentVariableValue("hadoop-conf-files", "src/test/resources/hbase-site.xml");
-        runner.setEnvironmentVariableValue("zk-quorum", "localhost");
-        runner.setEnvironmentVariableValue("zk-client-port", "2181");
-        runner.setEnvironmentVariableValue("zk-znode", "/hbase");
-
-        // conf file with no zk properties should be valid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES, "${hadoop-conf-files}");
-        runner.enableControllerService(service);
-
-        runner.assertValid(service);
-        runner.removeControllerService(service);
-
-        // only quorum and no conf file should be invalid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_2_ClientService.ZOOKEEPER_QUORUM, "${zk-quorum}");
-
-        runner.assertNotValid(service);
-        runner.removeControllerService(service);
-
-        // quorum and port, no znode, no conf file, should be invalid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_2_ClientService.ZOOKEEPER_QUORUM, "${zk-quorum}");
-        runner.setProperty(service, HBase_2_ClientService.ZOOKEEPER_CLIENT_PORT, "${zk-client-port}");
-
-        runner.assertNotValid(service);
-        runner.removeControllerService(service);
-
-        // quorum, port, and znode, no conf file, should be valid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_2_ClientService.ZOOKEEPER_QUORUM, "${zk-quorum}");
-        runner.setProperty(service, HBase_2_ClientService.ZOOKEEPER_CLIENT_PORT, "${zk-client-port}");
-        runner.setProperty(service, HBase_2_ClientService.ZOOKEEPER_ZNODE_PARENT, "${zk-znode}");
-        runner.enableControllerService(service);
-
-        runner.assertValid(service);
-        runner.removeControllerService(service);
-
-        // quorum and port with conf file should be valid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
-        runner.setProperty(service, HBase_2_ClientService.ZOOKEEPER_QUORUM, "localhost");
-        runner.setProperty(service, HBase_2_ClientService.ZOOKEEPER_CLIENT_PORT, "2181");
-        runner.enableControllerService(service);
-
-        runner.assertValid(service);
-        runner.removeControllerService(service);
-
-        // Kerberos - principal with non-set keytab and only hbase-site-security - valid because we need core-site-security to turn on security
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile, true);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site-security.xml");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "test@REALM");
-        runner.enableControllerService(service);
-        runner.assertValid(service);
-
-        // Kerberos - principal with non-set keytab and both config files
-        runner.disableControllerService(service);
-        runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES,
-                "src/test/resources/hbase-site-security.xml, src/test/resources/core-site-security.xml");
-        runner.assertNotValid(service);
-
-        // Kerberos - add valid options
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "test@REALM");
-        runner.enableControllerService(service);
-        runner.assertValid(service);
-
-        // Kerberos - add invalid non-existent keytab file
-        runner.disableControllerService(service);
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/missing.keytab");
-        runner.assertNotValid(service);
-
-        // Kerberos - add invalid principal
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "");
-        runner.assertNotValid(service);
-
-        // Kerberos - valid props but the KerberosProperties has a null Kerberos config file so be invalid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithoutFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES,
-                "src/test/resources/hbase-site-security.xml, src/test/resources/core-site-security.xml");
-        runner.setProperty(service, kerberosPropsWithoutFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.setProperty(service, kerberosPropsWithoutFile.getKerberosPrincipal(), "test@REALM");
-        runner.assertNotValid(service);
-
-        // Kerberos - add valid options with password
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile, true);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES,
-                "src/test/resources/hbase-site.xml, src/test/resources/core-site-security.xml");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPassword(), "password");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "test@REALM");
-        runner.assertValid(service);
-
-        // Kerberos - keytab and password at same time should be invalid
|
|
||||||
runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
|
|
||||||
runner.assertNotValid(service);
|
|
||||||
|
|
||||||
runner.removeProperty(service, kerberosPropsWithFile.getKerberosKeytab());
|
|
||||||
runner.assertValid(service);
|
|
||||||
|
|
||||||
// Kerberos - credentials service not valid when other kerberos properties set
|
|
||||||
final KerberosCredentialsService credentialsService = enabledKerberosCredentialsService(runner);
|
|
||||||
runner.setProperty(service, HBase_2_ClientService.KERBEROS_CREDENTIALS_SERVICE, credentialsService.getIdentifier());
|
|
||||||
runner.assertNotValid(service);
|
|
||||||
|
|
||||||
runner.removeProperty(service, kerberosPropsWithFile.getKerberosPassword());
|
|
||||||
runner.assertNotValid(service);
|
|
||||||
|
|
||||||
runner.removeProperty(service, kerberosPropsWithFile.getKerberosPrincipal());
|
|
||||||
runner.assertValid(service);
|
|
||||||
|
|
||||||
runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
|
|
||||||
runner.assertNotValid(service);
|
|
||||||
|
|
||||||
runner.removeProperty(service, kerberosPropsWithFile.getKerberosKeytab());
|
|
||||||
runner.assertValid(service);
|
|
||||||
|
|
||||||
// Kerberos - user service with credentials service is invalid
|
|
||||||
final KerberosUserService userService = enableKerberosUserService(runner);
|
|
||||||
runner.setProperty(service, HBase_2_ClientService.KERBEROS_USER_SERVICE, userService.getIdentifier());
|
|
||||||
runner.assertNotValid(service);
|
|
||||||
|
|
||||||
runner.removeProperty(service, HBase_2_ClientService.KERBEROS_CREDENTIALS_SERVICE);
|
|
||||||
runner.assertValid(service);
|
|
||||||
|
|
||||||
// Kerberos - user service with other kerberos properties is invalid
|
|
||||||
runner.setProperty(service, kerberosPropsWithFile.getKerberosPassword(), "password");
|
|
||||||
runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "test@REALM");
|
|
||||||
runner.assertNotValid(service);
|
|
||||||
|
|
||||||
runner.removeProperty(service, kerberosPropsWithFile.getKerberosPassword());
|
|
||||||
runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
|
|
||||||
runner.assertNotValid(service);
|
|
||||||
|
|
||||||
runner.removeProperty(service, kerberosPropsWithFile.getKerberosKeytab());
|
|
||||||
runner.assertNotValid(service);
|
|
||||||
|
|
||||||
runner.removeProperty(service, kerberosPropsWithFile.getKerberosPrincipal());
|
|
||||||
runner.assertValid(service);
|
|
||||||
}
|
|
||||||
|
|
||||||
private KerberosUserService enableKerberosUserService(final TestRunner runner) throws InitializationException {
|
|
||||||
final KerberosUserService kerberosUserService = mock(KerberosUserService.class);
|
|
||||||
when(kerberosUserService.getIdentifier()).thenReturn("userService1");
|
|
||||||
runner.addControllerService(kerberosUserService.getIdentifier(), kerberosUserService);
|
|
||||||
runner.enableControllerService(kerberosUserService);
|
|
||||||
return kerberosUserService;
|
|
||||||
}
|
|
||||||
|
|
||||||
private KerberosCredentialsService enabledKerberosCredentialsService(final TestRunner runner) throws InitializationException {
|
|
||||||
final KerberosCredentialsService credentialsService = mock(KerberosCredentialsService.class);
|
|
||||||
when(credentialsService.getIdentifier()).thenReturn("credsService1");
|
|
||||||
when(credentialsService.getPrincipal()).thenReturn("principal1");
|
|
||||||
when(credentialsService.getKeytab()).thenReturn("keytab1");
|
|
||||||
|
|
||||||
runner.addControllerService(credentialsService.getIdentifier(), credentialsService);
|
|
||||||
runner.enableControllerService(credentialsService);
|
|
||||||
return credentialsService;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
@@ -480,7 +285,7 @@ public class TestHBase_2_ClientService {
     }
 
     private MockHBaseClientService configureHBaseClientService(final TestRunner runner, final Table table) throws InitializationException {
-        final MockHBaseClientService service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
+        final MockHBaseClientService service = new MockHBaseClientService(table, COL_FAM);
         runner.addControllerService("hbaseClient", service);
         runner.setProperty(service, HBase_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
         runner.enableControllerService(service);
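For reference, the validation scenarios deleted above leaned on two local helpers, and only the KerberosUserService path survives this removal. A minimal sketch, mirroring the removed enableKerberosUserService helper, of how a test can still stand up Kerberos configuration with a mocked service (Mockito and the NiFi TestRunner assumed):

import org.apache.nifi.kerberos.KerberosUserService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunner;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

final class KerberosUserServiceTestSupport {

    // Registers and enables a mocked KerberosUserService so a component under
    // test can reference it by identifier, as the removed helper did.
    static KerberosUserService enableKerberosUserService(final TestRunner runner) throws InitializationException {
        final KerberosUserService userService = mock(KerberosUserService.class);
        when(userService.getIdentifier()).thenReturn("userService1");

        runner.addControllerService(userService.getIdentifier(), userService);
        runner.enableControllerService(userService);
        return userService;
    }
}

A test would then point the component's Kerberos User Service property at userService.getIdentifier(), replacing the removed principal, keytab, and password properties.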
@@ -20,7 +20,6 @@ package org.apache.nifi.hbase;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
@@ -31,7 +30,6 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import java.io.File;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -59,8 +57,7 @@ public class TestHBase_2_ListLookupService {
         final Table table = Mockito.mock(Table.class);
         when(table.getName()).thenReturn(TableName.valueOf(TABLE_NAME));
 
-        final KerberosProperties kerberosProperties = new KerberosProperties(new File("src/test/resources/krb5.conf"));
-        clientService = new MockHBaseClientService(table, "family", kerberosProperties);
+        clientService = new MockHBaseClientService(table, "family");
         runner.addControllerService("clientService", clientService);
         runner.setProperty(clientService, HBase_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
         runner.enableControllerService(clientService);
@@ -19,7 +19,6 @@ package org.apache.nifi.hbase;
 
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.serialization.record.Record;
 import org.apache.nifi.util.TestRunner;
 import org.apache.nifi.util.TestRunners;
@@ -27,7 +26,6 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import java.io.File;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -55,8 +53,7 @@ public class TestHBase_2_RecordLookupService {
         final Table table = Mockito.mock(Table.class);
         when(table.getName()).thenReturn(TableName.valueOf(TABLE_NAME));
 
-        final KerberosProperties kerberosProperties = new KerberosProperties(new File("src/test/resources/krb5.conf"));
-        clientService = new MockHBaseClientService(table, "family", kerberosProperties);
+        clientService = new MockHBaseClientService(table, "family");
         runner.addControllerService("clientService", clientService);
         runner.setProperty(clientService, HBase_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
         runner.enableControllerService(clientService);
@@ -1 +0,0 @@
-/bin/
@@ -1,24 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <!--
-      Licensed to the Apache Software Foundation (ASF) under one or more
-      contributor license agreements.  See the NOTICE file distributed with
-      this work for additional information regarding copyright ownership.
-      The ASF licenses this file to You under the Apache License, Version 2.0
-      (the "License"); you may not use this file except in compliance with
-      the License.  You may obtain a copy of the License at
-          http://www.apache.org/licenses/LICENSE-2.0
-      Unless required by applicable law or agreed to in writing, software
-      distributed under the License is distributed on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-      See the License for the specific language governing permissions and
-      limitations under the License.
-    -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-standard-services</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-    <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-    <packaging>jar</packaging>
-</project>
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.kerberos;
-
-import org.apache.nifi.controller.ControllerService;
-
-public interface KerberosCredentialsService extends ControllerService {
-
-    /**
-     * Returns the path to the configured Keytab file
-     *
-     * @return the path to the configured Keytab file
-     */
-    String getKeytab();
-
-    /**
-     * Returns the configured Principal to use when authenticating with Kerberos
-     *
-     * @return the configured Principal to use when authenticating with Kerberos
-     */
-    String getPrincipal();
-
-}
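With this interface gone, components that called getPrincipal() and getKeytab() need another source of credentials. A hypothetical migration sketch, assuming the retained KerberosUserService API (its createKerberosUser() factory) and the KerberosUser type from NiFi's security utilities; the class and method below are illustrative, not part of this commit:

import org.apache.nifi.kerberos.KerberosUserService;
import org.apache.nifi.security.krb.KerberosUser;

final class KerberosUserMigrationSketch {

    // Instead of handing out a principal and keytab path for the caller to
    // log in with, the user service builds a KerberosUser that owns the login.
    static KerberosUser loginFromService(final KerberosUserService userService) {
        final KerberosUser kerberosUser = userService.createKerberosUser();
        kerberosUser.login();
        return kerberosUser;
    }
}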
@@ -1,38 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <!--
-      Licensed to the Apache Software Foundation (ASF) under one or more
-      contributor license agreements.  See the NOTICE file distributed with
-      this work for additional information regarding copyright ownership.
-      The ASF licenses this file to You under the Apache License, Version 2.0
-      (the "License"); you may not use this file except in compliance with
-      the License.  You may obtain a copy of the License at
-          http://www.apache.org/licenses/LICENSE-2.0
-      Unless required by applicable law or agreed to in writing, software
-      distributed under the License is distributed on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-      See the License for the specific language governing permissions and
-      limitations under the License.
-    -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-bundle</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-    <artifactId>nifi-kerberos-credentials-service-nar</artifactId>
-    <packaging>nar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-standard-services-api-nar</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-            <type>nar</type>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
-    </dependencies>
-</project>
@@ -1 +0,0 @@
-/bin/
@@ -1,26 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <!-- Licensed to the Apache Software Foundation (ASF) under one or more
-        contributor license agreements. See the NOTICE file distributed with this
-        work for additional information regarding copyright ownership. The ASF licenses
-        this file to You under the Apache License, Version 2.0 (the "License"); you
-        may not use this file except in compliance with the License. You may obtain
-        a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless
-        required by applicable law or agreed to in writing, software distributed
-        under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
-        OR CONDITIONS OF ANY KIND, either express or implied. See the License for
-        the specific language governing permissions and limitations under the License. -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-kerberos-credentials-service-bundle</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-    <artifactId>nifi-kerberos-credentials-service</artifactId>
-    <packaging>jar</packaging>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-        </dependency>
-    </dependencies>
-</project>
@@ -1,125 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.kerberos;
-
-import org.apache.nifi.annotation.behavior.Restricted;
-import org.apache.nifi.annotation.behavior.Restriction;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.lifecycle.OnEnabled;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.RequiredPermission;
-import org.apache.nifi.components.ValidationContext;
-import org.apache.nifi.components.ValidationResult;
-import org.apache.nifi.components.resource.ResourceCardinality;
-import org.apache.nifi.components.resource.ResourceType;
-import org.apache.nifi.controller.AbstractControllerService;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.controller.ControllerServiceInitializationContext;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.reporting.InitializationException;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-@CapabilityDescription("Provides a mechanism for specifying a Keytab and a Principal that other components are able to use in order to "
-    + "perform authentication using Kerberos. By encapsulating this information into a Controller Service and allowing other components to make use of it "
-    + "(as opposed to specifying the principal and keytab directly in the processor) an administrator is able to choose which users are allowed to "
-    + "use which keytabs and principals. This provides a more robust security model for multi-tenant use cases.")
-@Tags({"Kerberos", "Keytab", "Principal", "Credentials", "Authentication", "Security"})
-@Restricted(restrictions = {
-    @Restriction(requiredPermission = RequiredPermission.ACCESS_KEYTAB, explanation = "Allows user to define a Keytab and principal that can then be used by other components.")
-})
-public class KeytabCredentialsService extends AbstractControllerService implements KerberosCredentialsService {
-
-    static final PropertyDescriptor PRINCIPAL = new PropertyDescriptor.Builder()
-        .name("Kerberos Principal")
-        .description("Kerberos principal to authenticate as. Requires nifi.kerberos.krb5.file to be set in your nifi.properties")
-        .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
-        .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
-        .required(true)
-        .build();
-
-    static final PropertyDescriptor KEYTAB = new PropertyDescriptor.Builder()
-        .name("Kerberos Keytab")
-        .description("Kerberos keytab associated with the principal. Requires nifi.kerberos.krb5.file to be set in your nifi.properties")
-        .identifiesExternalResource(ResourceCardinality.SINGLE, ResourceType.FILE)
-        .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
-        .required(true)
-        .build();
-
-    private File kerberosConfigFile;
-    private volatile String principal;
-    private volatile String keytab;
-
-    @Override
-    protected final void init(final ControllerServiceInitializationContext config) throws InitializationException {
-        kerberosConfigFile = config.getKerberosConfigurationFile();
-    }
-
-    @Override
-    protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
-        final List<ValidationResult> results = new ArrayList<>();
-
-        // Check that the Kerberos configuration is set
-        if (kerberosConfigFile == null) {
-            results.add(new ValidationResult.Builder()
-                .subject("Kerberos Configuration File")
-                .valid(false)
-                .explanation("The nifi.kerberos.krb5.file property must be set in nifi.properties in order to use Kerberos authentication")
-                .build());
-        } else if (!kerberosConfigFile.canRead()) {
-            // Check that the Kerberos configuration is readable
-            results.add(new ValidationResult.Builder()
-                .subject("Kerberos Configuration File")
-                .valid(false)
-                .explanation("Unable to read configured Kerberos Configuration File " + kerberosConfigFile.getAbsolutePath() + ", which is specified in nifi.properties. "
-                    + "Please ensure that the path is valid and that NiFi has adequate permissions to read the file.")
-                .build());
-        }
-
-        return results;
-    }
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        final List<PropertyDescriptor> properties = new ArrayList<>(2);
-        properties.add(KEYTAB);
-        properties.add(PRINCIPAL);
-        return properties;
-    }
-
-    @OnEnabled
-    public void setConfiguredValues(final ConfigurationContext context) {
-        this.keytab = context.getProperty(KEYTAB).evaluateAttributeExpressions().getValue();
-        this.principal = context.getProperty(PRINCIPAL).evaluateAttributeExpressions().getValue();
-    }
-
-    @Override
-    public String getKeytab() {
-        return keytab;
-    }
-
-    @Override
-    public String getPrincipal() {
-        return principal;
-    }
-}
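The removed service only stored and validated the principal/keytab pair; consumers performed the actual login. As an illustration of what those two values feed into, a self-contained JAAS sketch using only standard javax.security.auth APIs (the configuration name and option set are illustrative, not NiFi code):

import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;

import java.util.HashMap;
import java.util.Map;

final class KeytabLoginSketch {

    // Performs a keytab-based Kerberos login for the given principal using the
    // JDK's Krb5LoginModule, roughly what consumers of the removed service did
    // with its getPrincipal() and getKeytab() values.
    static Subject login(final String principal, final String keytabPath) throws LoginException {
        final Map<String, String> options = new HashMap<>();
        options.put("principal", principal);
        options.put("keyTab", keytabPath);
        options.put("useKeyTab", "true");
        options.put("storeKey", "true");
        options.put("doNotPrompt", "true");

        final Configuration configuration = new Configuration() {
            @Override
            public AppConfigurationEntry[] getAppConfigurationEntry(final String name) {
                return new AppConfigurationEntry[]{
                        new AppConfigurationEntry(
                                "com.sun.security.auth.module.Krb5LoginModule",
                                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                                options)
                };
            }
        };

        final LoginContext loginContext = new LoginContext("KeytabLoginSketch", null, null, configuration);
        loginContext.login();
        return loginContext.getSubject();
    }
}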
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-org.apache.nifi.kerberos.KeytabCredentialsService
|
|||||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
|
||||||
<!--
|
|
||||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
|
||||||
contributor license agreements. See the NOTICE file distributed with
|
|
||||||
this work for additional information regarding copyright ownership.
|
|
||||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
|
||||||
(the "License"); you may not use this file except in compliance with
|
|
||||||
the License. You may obtain a copy of the License at
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
-->
|
|
||||||
<modelVersion>4.0.0</modelVersion>
|
|
||||||
<parent>
|
|
||||||
<groupId>org.apache.nifi</groupId>
|
|
||||||
<artifactId>nifi-standard-services</artifactId>
|
|
||||||
<version>2.0.0-SNAPSHOT</version>
|
|
||||||
</parent>
|
|
||||||
<artifactId>nifi-kerberos-credentials-service-bundle</artifactId>
|
|
||||||
<packaging>pom</packaging>
|
|
||||||
<modules>
|
|
||||||
<module>nifi-kerberos-credentials-service</module>
|
|
||||||
<module>nifi-kerberos-credentials-service-nar</module>
|
|
||||||
</modules>
|
|
||||||
</project>
|
|
@@ -94,11 +94,6 @@
             <artifactId>nifi-lookup-service-api</artifactId>
             <scope>compile</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-            <scope>compile</scope>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-kerberos-user-service-api</artifactId>
@@ -43,8 +43,6 @@
         <module>nifi-schema-registry-service-api</module>
         <module>nifi-record-serialization-service-api</module>
         <module>nifi-record-serialization-services-bundle</module>
-        <module>nifi-kerberos-credentials-service-api</module>
-        <module>nifi-kerberos-credentials-service-bundle</module>
         <module>nifi-proxy-configuration-api</module>
         <module>nifi-proxy-configuration-bundle</module>
         <module>nifi-key-service-api</module>
@@ -53,16 +53,6 @@ export NIFI_LOG_DIR
 # Disable automatic Logback Initializer to avoid shutdown on web application termination
 export logbackDisableServletContainerInitializer="true"
 
-# Set to false to force the use of Keytab controller service in processors
-# that use Kerberos. If true, these processors will allow configuration of keytab
-# and principal directly within the processor. If false, these processors will be
-# invalid if attempting to configure these properties. This may be advantageous in
-# a multi-tenant environment where management of keytabs should be performed only by
-# a user with elevated permissions (i.e., users that have been granted the 'ACCESS_KEYTAB'
-# restriction).
-NIFI_ALLOW_EXPLICIT_KEYTAB="$(setOrDefault "$NIFI_ALLOW_EXPLICIT_KEYTAB" true)"
-export NIFI_ALLOW_EXPLICIT_KEYTAB
-
 # Set to true to deny access to the Local File System from HDFS Processors
 # This flag forces HDFS Processors to evaluate the File System path during scheduling
 NIFI_HDFS_DENY_LOCAL_FILE_SYSTEM_ACCESS="$(setOrDefault "$NIFI_HDFS_DENY_LOCAL_FILE_SYSTEM_ACCESS" false)"
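For context, this variable was consumed on the Java side as an environment guard: when it resolved to false, processors marked explicit keytab and principal properties invalid, steering users to the now removed credentials service. A sketch of that kind of check, assuming the flag is read straight from the process environment; the helper name is hypothetical, and with this commit the guard disappears along with the properties it protected:

final class ExplicitKeytabFlagSketch {

    // Mirrors the script default above: an unset variable behaves like "true",
    // while an explicit "false" forbids keytab/principal properties on components.
    static boolean isAllowExplicitKeytab() {
        final String allowExplicitKeytab = System.getenv("NIFI_ALLOW_EXPLICIT_KEYTAB");
        return allowExplicitKeytab == null || Boolean.parseBoolean(allowExplicitKeytab);
    }
}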
@@ -27,15 +27,6 @@ export NIFI_PID_DIR="${NIFI_HOME}/run"
 #The directory for NiFi log files
 export NIFI_LOG_DIR="${NIFI_HOME}/logs"
 
-# Set to false to force the use of Keytab controller service in processors
-# that use Kerberos. If true, these processors will allow configuration of keytab
-# and principal directly within the processor. If false, these processors will be
-# invalid if attempting to configure these properties. This may be advantageous in
-# a multi-tenant environment where management of keytabs should be performed only by
-# a user with elevated permissions (i.e., users that have been granted the 'ACCESS_KEYTAB'
-# restriction).
-export NIFI_ALLOW_EXPLICIT_KEYTAB=true
-
 # Set to true to deny access to the Local File System from HDFS Processors
 # This flag forces HDFS Processors to evaluate the File System path during scheduling
 export NIFI_HDFS_DENY_LOCAL_FILE_SYSTEM_ACCESS=false