diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
index 7ac14b1843..51f6a8241c 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/PutHDFS.java
@@ -19,7 +19,6 @@ package org.apache.nifi.processors.hadoop;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsCreateModes;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.ipc.RemoteException;
@@ -149,8 +148,9 @@ public class PutHDFS extends AbstractHadoopProcessor {
     public static final PropertyDescriptor UMASK = new PropertyDescriptor.Builder()
             .name("Permissions umask")
             .description(
-                    "A umask represented as an octal number which determines the permissions of files written to HDFS. "
-                            + "If this property is empty, processor uses fs.permission.umask-mode. If fs.permission.umask-mode is undefined, processor honors FsPermission.DEFAULT_UMASK.")
+                    "A umask represented as an octal number which determines the permissions of files written to HDFS. "
+                            + "This overrides the Hadoop property \"fs.permission.umask-mode\". "
+                            + "If this property and \"fs.permission.umask-mode\" are undefined, the Hadoop default \"022\" will be used.")
             .addValidator(HadoopValidators.UMASK_VALIDATOR)
             .build();
@@ -224,7 +224,6 @@ public class PutHDFS extends AbstractHadoopProcessor {
 
         final FileSystem hdfs = getFileSystem();
         final Configuration configuration = getConfiguration();
-
         final UserGroupInformation ugi = getUserGroupInformation();
 
         if (configuration == null || hdfs == null || ugi == null) {
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/PutHDFSTest.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/PutHDFSTest.java
index 6f74766ae8..9c51f34c72 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/PutHDFSTest.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/PutHDFSTest.java
@@ -369,56 +369,67 @@ public class PutHDFSTest {
     }
 
     @Test
-    public void testPutFilePermissionsWithProcessorConfiguredUmask() throws FileNotFoundException, IOException {
+    public void testPutFilePermissionsWithProcessorConfiguredUmask() throws IOException {
         // assert the file permission is the same value as processor's property
-        PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
+        MockFileSystem fileSystem = new MockFileSystem();
+        PutHDFS proc = new TestablePutHDFS(kerberosProperties, fileSystem);
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
         runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
-        runner.setProperty(PutHDFS.UMASK, "777");
+        String umaskPropertyValue = "027";
+        runner.setProperty(PutHDFS.UMASK, umaskPropertyValue);
+        // invoke the abstractOnScheduled method so the Hadoop configuration is available to apply the MockFileSystem instance
+        proc.abstractOnScheduled(runner.getProcessContext());
+        fileSystem.setConf(proc.getConfiguration());
 
         try (FileInputStream fis = new FileInputStream("src/test/resources/testdata/randombytes-1")) {
             Map<String, String> attributes = new HashMap<>();
             attributes.put(CoreAttributes.FILENAME.key(), "randombytes-1");
             runner.enqueue(fis, attributes);
             runner.run();
         }
-        assertEquals(FsPermission.getFileDefault().applyUMask(new FsPermission("777")), mockFileSystem.getFileStatus(new Path("target/test-classes/randombytes-1")).getPermission());
+        assertEquals(FsPermission.getFileDefault().applyUMask(new FsPermission(umaskPropertyValue)), fileSystem.getFileStatus(new Path("target/test-classes/randombytes-1")).getPermission());
     }
 
     @Test
-    public void testPutFilePermissionsWithXmlConfiguredUmask() throws FileNotFoundException, IOException {
+    public void testPutFilePermissionsWithXmlConfiguredUmask() throws IOException {
         // assert the file permission is the same value as xml
-        PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
+        MockFileSystem fileSystem = new MockFileSystem();
+        PutHDFS proc = new TestablePutHDFS(kerberosProperties, fileSystem);
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
         runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
         runner.setProperty(PutHDFS.HADOOP_CONFIGURATION_RESOURCES, "src/test/resources/core-site-perms.xml");
+        // invoke the abstractOnScheduled method so the Hadoop configuration is available to apply the MockFileSystem instance
+        proc.abstractOnScheduled(runner.getProcessContext());
+        fileSystem.setConf(proc.getConfiguration());
 
         try (FileInputStream fis = new FileInputStream("src/test/resources/testdata/randombytes-1")) {
             Map<String, String> attributes = new HashMap<>();
             attributes.put(CoreAttributes.FILENAME.key(), "randombytes-1");
             runner.enqueue(fis, attributes);
             runner.run();
         }
-        assertEquals(FsPermission.getFileDefault().applyUMask(new FsPermission("777")),
-                mockFileSystem.getFileStatus(new Path("target/test-classes/randombytes-1")).getPermission());
+        assertEquals(FsPermission.getFileDefault().applyUMask(new FsPermission("777")), fileSystem.getFileStatus(new Path("target/test-classes/randombytes-1")).getPermission());
     }
 
     @Test
-    public void testPutFIlePermissionsWithNoConfiguredUmask() throws FileNotFoundException, IOException {
+    public void testPutFilePermissionsWithNoConfiguredUmask() throws IOException {
         // assert the file permission fallback works. It should read FsPermission.DEFAULT_UMASK
-        PutHDFS proc = new TestablePutHDFS(kerberosProperties, mockFileSystem);
+        MockFileSystem fileSystem = new MockFileSystem();
+        PutHDFS proc = new TestablePutHDFS(kerberosProperties, fileSystem);
         TestRunner runner = TestRunners.newTestRunner(proc);
         runner.setProperty(PutHDFS.DIRECTORY, "target/test-classes");
         runner.setProperty(PutHDFS.CONFLICT_RESOLUTION, "replace");
+        // invoke the abstractOnScheduled method so the Hadoop configuration is available to apply the MockFileSystem instance
+        proc.abstractOnScheduled(runner.getProcessContext());
+        fileSystem.setConf(proc.getConfiguration());
 
         try (FileInputStream fis = new FileInputStream("src/test/resources/testdata/randombytes-1")) {
             Map<String, String> attributes = new HashMap<>();
             attributes.put(CoreAttributes.FILENAME.key(), "randombytes-1");
             runner.enqueue(fis, attributes);
             runner.run();
         }
-        assertEquals(
-                FsPermission.getFileDefault().applyUMask(new FsPermission((short)FsPermission.DEFAULT_UMASK)),
-                mockFileSystem.getFileStatus(new Path("target/test-classes/randombytes-1")).getPermission()
-        );
+        assertEquals(FsPermission.getFileDefault().applyUMask(new FsPermission((short)FsPermission.DEFAULT_UMASK)),
+                fileSystem.getFileStatus(new Path("target/test-classes/randombytes-1")).getPermission());
     }
 
     private class TestablePutHDFS extends PutHDFS {
@@ -438,6 +449,7 @@ public class PutHDFSTest {
 
         @Override
         protected FileSystem getFileSystem(Configuration config) {
+            fileSystem.setConf(config);
             return fileSystem;
         }