MAPREDUCE-6494. Permission issue when running archive-logs tool as different users (rkanter)

(cherry picked from commit 5db371f52f)
parent 7273ef0590
commit 46ef5aa8eb
@@ -316,6 +316,9 @@ Release 2.8.0 - UNRELEASED
 
     MAPREDUCE-6480. archive-logs tool may miss applications (rkanter)
 
+    MAPREDUCE-6494. Permission issue when running archive-logs tool as
+    different users (rkanter)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -76,6 +76,7 @@ public class HadoopArchiveLogs implements Tool {
   private static final String MAX_TOTAL_LOGS_SIZE_OPTION = "maxTotalLogsSize";
   private static final String MEMORY_OPTION = "memory";
   private static final String VERBOSE_OPTION = "verbose";
+  private static final String FORCE_OPTION = "force";
 
   private static final int DEFAULT_MAX_ELIGIBLE = -1;
   private static final int DEFAULT_MIN_NUM_LOG_FILES = 20;
@@ -91,6 +92,8 @@ public class HadoopArchiveLogs implements Tool {
   @VisibleForTesting
   long memory = DEFAULT_MEMORY;
   private boolean verbose = false;
+  @VisibleForTesting
+  boolean force = false;
 
   @VisibleForTesting
   Set<AppInfo> eligibleApplications;
@@ -126,6 +129,8 @@ public class HadoopArchiveLogs implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
+    int exitCode = 1;
+
     handleOpts(args);
 
     FileSystem fs = null;
@@ -141,44 +146,41 @@ public class HadoopArchiveLogs implements Tool {
     }
     try {
       fs = FileSystem.get(conf);
-      checkFilesAndSeedApps(fs, remoteRootLogDir, suffix);
-
-      // Prepare working directory
-      if (fs.exists(workingDir)) {
-        fs.delete(workingDir, true);
+      if (prepareWorkingDir(fs, workingDir)) {
+
+        checkFilesAndSeedApps(fs, remoteRootLogDir, suffix);
+
+        filterAppsByAggregatedStatus();
+
+        checkMaxEligible();
+
+        if (eligibleApplications.isEmpty()) {
+          LOG.info("No eligible applications to process");
+          exitCode = 0;
+        } else {
+          StringBuilder sb =
+              new StringBuilder("Will process the following applications:");
+          for (AppInfo app : eligibleApplications) {
+            sb.append("\n\t").append(app.getAppId());
+          }
+          LOG.info(sb.toString());
+
+          File localScript = File.createTempFile("hadoop-archive-logs-", ".sh");
+          generateScript(localScript, workingDir, remoteRootLogDir, suffix);
+
+          exitCode = runDistributedShell(localScript) ? 0 : 1;
+        }
       }
-      fs.mkdirs(workingDir);
-      fs.setPermission(workingDir,
-          new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
     } finally {
       if (fs != null) {
+        // Cleanup working directory
+        if (fs.exists(workingDir)) {
+          fs.delete(workingDir, true);
+        }
         fs.close();
       }
     }
-
-    filterAppsByAggregatedStatus();
-
-    checkMaxEligible();
-
-    if (eligibleApplications.isEmpty()) {
-      LOG.info("No eligible applications to process");
-      System.exit(0);
-    }
-
-    StringBuilder sb =
-        new StringBuilder("Will process the following applications:");
-    for (AppInfo app : eligibleApplications) {
-      sb.append("\n\t").append(app.getAppId());
-    }
-    LOG.info(sb.toString());
-
-    File localScript = File.createTempFile("hadoop-archive-logs-", ".sh");
-    generateScript(localScript, workingDir, remoteRootLogDir, suffix);
-
-    if (runDistributedShell(localScript)) {
-      return 0;
-    }
-    return -1;
+    return exitCode;
   }
 
   private void handleOpts(String[] args) throws ParseException {
@@ -202,12 +204,17 @@ public class HadoopArchiveLogs implements Tool {
     memoryOpt.setArgName("megabytes");
     Option verboseOpt = new Option(VERBOSE_OPTION, false,
         "Print more details.");
+    Option forceOpt = new Option(FORCE_OPTION, false,
+        "Force recreating the working directory if an existing one is found. " +
+        "This should only be used if you know that another instance is " +
+        "not currently running");
     opts.addOption(helpOpt);
     opts.addOption(maxEligibleOpt);
     opts.addOption(minNumLogFilesOpt);
     opts.addOption(maxTotalLogsSizeOpt);
     opts.addOption(memoryOpt);
     opts.addOption(verboseOpt);
+    opts.addOption(forceOpt);
 
     try {
       CommandLineParser parser = new GnuParser();
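Editorial side note, not part of the patch: with this change run() always reports its status through the return value (the old code could call System.exit(0) directly), so the tool can be driven programmatically and handed the new -force flag. Below is a minimal sketch of that; the org.apache.hadoop.tools package name and the ToolRunner wiring are assumptions, while the class name, its (Configuration) constructor, and the "force" option name come from the patch itself.

    // Illustrative sketch only -- not part of this commit. Package name and
    // ToolRunner wiring are assumptions; the class name, its (Configuration)
    // constructor, and the "force" option come from the patch above.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.HadoopArchiveLogs;
    import org.apache.hadoop.util.ToolRunner;

    public class ArchiveLogsForceExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // -force asks the tool to recreate the working directory if a previous
        // run left one behind; without it, an existing directory makes it exit.
        int rc = ToolRunner.run(conf, new HadoopArchiveLogs(conf),
            new String[] {"-force"});
        System.exit(rc);
      }
    }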
@@ -242,6 +249,9 @@ public class HadoopArchiveLogs implements Tool {
       if (commandLine.hasOption(VERBOSE_OPTION)) {
         verbose = true;
       }
+      if (commandLine.hasOption(FORCE_OPTION)) {
+        force = true;
+      }
     } catch (ParseException pe) {
       HelpFormatter formatter = new HelpFormatter();
       formatter.printHelp("yarn archive-logs", opts);
@@ -249,6 +259,25 @@ public class HadoopArchiveLogs implements Tool {
     }
   }
 
+  @VisibleForTesting
+  boolean prepareWorkingDir(FileSystem fs, Path workingDir) throws IOException {
+    if (fs.exists(workingDir)) {
+      if (force) {
+        LOG.info("Existing Working Dir detected: -" + FORCE_OPTION +
+            " specified -> recreating Working Dir");
+        fs.delete(workingDir, true);
+      } else {
+        LOG.info("Existing Working Dir detected: -" + FORCE_OPTION +
+            " not specified -> exiting");
+        return false;
+      }
+    }
+    fs.mkdirs(workingDir);
+    fs.setPermission(workingDir,
+        new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE));
+    return true;
+  }
+
   @VisibleForTesting
   void filterAppsByAggregatedStatus() throws IOException, YarnException {
     YarnClient client = YarnClient.createYarnClient();
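For orientation (editorial note, not part of the patch): the permission on the working directory moves from owner-only to owner-plus-group access, which lines up with the issue title about running the tool as different users; the -force/prepareWorkingDir logic above controls when that directory is (re)created. A small standalone sketch of the two permission values follows; the octal readings in the comments are ordinary POSIX interpretations, not output of the tool.

    // Illustrative sketch only -- not part of this commit.
    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class WorkingDirPermissions {
      public static void main(String[] args) {
        // Before the patch: owner-only access to the working dir (rwx------, 700).
        FsPermission before =
            new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
        // After the patch: owner and group access (rwxrwx---, 770).
        FsPermission after =
            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE);
        System.out.println(before + " -> " + after); // expected: rwx------ -> rwxrwx---
      }
    }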
@@ -23,6 +23,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
@@ -309,6 +311,44 @@ public class TestHadoopArchiveLogs {
     Assert.assertArrayEquals(statuses, LogAggregationStatus.values());
   }
 
+  @Test(timeout = 5000)
+  public void testPrepareWorkingDir() throws Exception {
+    Configuration conf = new Configuration();
+    HadoopArchiveLogs hal = new HadoopArchiveLogs(conf);
+    FileSystem fs = FileSystem.getLocal(conf);
+    Path workingDir = new Path("target", "testPrepareWorkingDir");
+    fs.delete(workingDir, true);
+    Assert.assertFalse(fs.exists(workingDir));
+    // -force is false and the dir doesn't exist so it will create one
+    hal.force = false;
+    boolean dirPrepared = hal.prepareWorkingDir(fs, workingDir);
+    Assert.assertTrue(dirPrepared);
+    Assert.assertTrue(fs.exists(workingDir));
+    Assert.assertEquals(
+        new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE),
+        fs.getFileStatus(workingDir).getPermission());
+    // Throw a file in the dir
+    Path dummyFile = new Path(workingDir, "dummy.txt");
+    fs.createNewFile(dummyFile);
+    Assert.assertTrue(fs.exists(dummyFile));
+    // -force is false and the dir exists, so nothing will happen and the dummy
+    // still exists
+    dirPrepared = hal.prepareWorkingDir(fs, workingDir);
+    Assert.assertFalse(dirPrepared);
+    Assert.assertTrue(fs.exists(workingDir));
+    Assert.assertTrue(fs.exists(dummyFile));
+    // -force is true and the dir exists, so it will recreate it and the dummy
+    // won't exist anymore
+    hal.force = true;
+    dirPrepared = hal.prepareWorkingDir(fs, workingDir);
+    Assert.assertTrue(dirPrepared);
+    Assert.assertTrue(fs.exists(workingDir));
+    Assert.assertEquals(
+        new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE),
+        fs.getFileStatus(workingDir).getPermission());
+    Assert.assertFalse(fs.exists(dummyFile));
+  }
+
   private static void createFile(FileSystem fs, Path p, long sizeMultiple)
       throws IOException {
     FSDataOutputStream out = null;