MapReduce JHS should honor yarn.webapp.filter-entity-list-by-user (addendum). Contributed by Sunil Govindan.

This commit is contained in:
Rohith Sharma K S 2018-06-02 09:27:03 +05:30
parent e11d674049
commit d5e69d8994
4 changed files with 56 additions and 39 deletions

View File

@ -27,8 +27,6 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@ -41,10 +39,8 @@ import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI; import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil; import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.Times; import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.ResponseInfo; import org.apache.hadoop.yarn.webapp.ResponseInfo;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet; import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
@ -60,14 +56,9 @@ import com.google.inject.Inject;
*/ */
public class HsJobBlock extends HtmlBlock { public class HsJobBlock extends HtmlBlock {
final AppContext appContext; final AppContext appContext;
private UserGroupInformation ugi;
private boolean isFilterAppListByUserEnabled;
@Inject HsJobBlock(Configuration conf, AppContext appctx, ViewContext ctx) { @Inject HsJobBlock(AppContext appctx) {
super(ctx);
appContext = appctx; appContext = appctx;
isFilterAppListByUserEnabled = conf
.getBoolean(YarnConfiguration.FILTER_ENTITY_LIST_BY_USER, false);
} }
/* /*
@ -87,13 +78,6 @@ public class HsJobBlock extends HtmlBlock {
html.p().__("Sorry, ", jid, " not found.").__(); html.p().__("Sorry, ", jid, " not found.").__();
return; return;
} }
ugi = getCallerUGI();
if (isFilterAppListByUserEnabled && ugi != null
&& !j.checkAccess(ugi, JobACL.VIEW_JOB)) {
html.p().__("Sorry, ", jid, " could not be viewed for '",
ugi.getUserName(), "'.").__();
return;
}
if(j instanceof UnparsedJob) { if(j instanceof UnparsedJob) {
final int taskCount = j.getTotalMaps() + j.getTotalReduces(); final int taskCount = j.getTotalMaps() + j.getTotalReduces();
UnparsedJob oversizedJob = (UnparsedJob) j; UnparsedJob oversizedJob = (UnparsedJob) j;

View File

@ -22,10 +22,15 @@ import java.text.SimpleDateFormat;
import java.util.Date; import java.util.Date;
import org.apache.commons.lang3.StringEscapeUtils; import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo; import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.Times; import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet; import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet.TABLE; import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet.TABLE;
@ -42,9 +47,19 @@ public class HsJobsBlock extends HtmlBlock {
final AppContext appContext; final AppContext appContext;
final SimpleDateFormat dateFormat = final SimpleDateFormat dateFormat =
new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z"); new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z");
private UserGroupInformation ugi;
private boolean isFilterAppListByUserEnabled;
private boolean areAclsEnabled;
private AccessControlList adminAclList;
@Inject HsJobsBlock(AppContext appCtx) { @Inject
HsJobsBlock(Configuration conf, AppContext appCtx, ViewContext ctx) {
super(ctx);
appContext = appCtx; appContext = appCtx;
isFilterAppListByUserEnabled = conf
.getBoolean(YarnConfiguration.FILTER_ENTITY_LIST_BY_USER, false);
areAclsEnabled = conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false);
adminAclList = new AccessControlList(conf.get(MRConfig.MR_ADMINS, " "));
} }
/* /*
@ -77,6 +92,12 @@ public class HsJobsBlock extends HtmlBlock {
StringBuilder jobsTableData = new StringBuilder("[\n"); StringBuilder jobsTableData = new StringBuilder("[\n");
for (Job j : appContext.getAllJobs().values()) { for (Job j : appContext.getAllJobs().values()) {
JobInfo job = new JobInfo(j); JobInfo job = new JobInfo(j);
ugi = getCallerUGI();
// Allow to list only per-user apps if incoming ugi has permission.
if (isFilterAppListByUserEnabled && ugi != null
&& !checkAccess(job.getUserName())) {
continue;
}
jobsTableData.append("[\"") jobsTableData.append("[\"")
.append(dateFormat.format(new Date(job.getSubmitTime()))).append("\",\"") .append(dateFormat.format(new Date(job.getSubmitTime()))).append("\",\"")
.append(job.getFormattedStartTimeStr(dateFormat)).append("\",\"") .append(job.getFormattedStartTimeStr(dateFormat)).append("\",\"")
@ -139,4 +160,21 @@ public class HsJobsBlock extends HtmlBlock {
__(). __().
__(); __();
} }
private boolean checkAccess(String userName) {
if(!areAclsEnabled) {
return true;
}
// User could see its own job.
if (ugi.getShortUserName().equals(userName)) {
return true;
}
// Admin could also see all jobs
if (adminAclList != null && adminAclList.isUserAllowed(ugi)) {
return true;
}
return false;
}
} }

View File

@ -59,6 +59,8 @@ import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.webapp.Controller.RequestContext; import org.apache.hadoop.yarn.webapp.Controller.RequestContext;
import org.apache.hadoop.yarn.webapp.View.ViewContext;
import org.apache.hadoop.yarn.webapp.Controller;
import org.apache.hadoop.yarn.webapp.Params; import org.apache.hadoop.yarn.webapp.Params;
import org.apache.hadoop.yarn.webapp.View; import org.apache.hadoop.yarn.webapp.View;
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage; import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
@ -223,7 +225,14 @@ public class TestBlocks {
jobs.put(job.getID(), job); jobs.put(job.getID(), job);
when(ctx.getAllJobs()).thenReturn(jobs); when(ctx.getAllJobs()).thenReturn(jobs);
HsJobsBlock block = new HsJobsBlockForTest(ctx); Controller.RequestContext rc = mock(Controller.RequestContext.class);
ViewContext view = mock(ViewContext.class);
HttpServletRequest req =mock(HttpServletRequest.class);
when(rc.getRequest()).thenReturn(req);
when(view.requestContext()).thenReturn(rc);
Configuration conf = new Configuration();
HsJobsBlock block = new HsJobsBlockForTest(conf, ctx, view);
PrintWriter pWriter = new PrintWriter(data); PrintWriter pWriter = new PrintWriter(data);
Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false); Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
block.render(html); block.render(html);
@ -400,8 +409,10 @@ public class TestBlocks {
} }
private class HsJobsBlockForTest extends HsJobsBlock { private class HsJobsBlockForTest extends HsJobsBlock {
HsJobsBlockForTest(AppContext appCtx) {
super(appCtx); HsJobsBlockForTest(Configuration conf, AppContext appCtx,
ViewContext view) {
super(conf, appCtx, view);
} }
@Override @Override

View File

@ -33,10 +33,8 @@ import org.apache.hadoop.mapreduce.v2.hs.UnparsedJob;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.StringHelper; import org.apache.hadoop.yarn.util.StringHelper;
import org.apache.hadoop.yarn.webapp.Controller;
import org.apache.hadoop.yarn.webapp.ResponseInfo; import org.apache.hadoop.yarn.webapp.ResponseInfo;
import org.apache.hadoop.yarn.webapp.SubView; import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.View.ViewContext;
import org.apache.hadoop.yarn.webapp.view.BlockForTest; import org.apache.hadoop.yarn.webapp.view.BlockForTest;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock; import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest; import org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest;
@ -51,8 +49,6 @@ import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@ -73,13 +69,7 @@ public class TestHsJobBlock {
new JobHistoryStubWithAllOversizeJobs(maxAllowedTaskNum); new JobHistoryStubWithAllOversizeJobs(maxAllowedTaskNum);
jobHistory.init(config); jobHistory.init(config);
Controller.RequestContext rc = mock(Controller.RequestContext.class); HsJobBlock jobBlock = new HsJobBlock(jobHistory) {
ViewContext view = mock(ViewContext.class);
HttpServletRequest req =mock(HttpServletRequest.class);
when(rc.getRequest()).thenReturn(req);
when(view.requestContext()).thenReturn(rc);
HsJobBlock jobBlock = new HsJobBlock(config, jobHistory, view) {
// override this so that job block can fetch a job id. // override this so that job block can fetch a job id.
@Override @Override
public Map<String, String> moreParams() { public Map<String, String> moreParams() {
@ -111,13 +101,7 @@ public class TestHsJobBlock {
JobHistory jobHistory = new JobHitoryStubWithAllNormalSizeJobs(); JobHistory jobHistory = new JobHitoryStubWithAllNormalSizeJobs();
jobHistory.init(config); jobHistory.init(config);
Controller.RequestContext rc = mock(Controller.RequestContext.class); HsJobBlock jobBlock = new HsJobBlock(jobHistory) {
ViewContext view = mock(ViewContext.class);
HttpServletRequest req =mock(HttpServletRequest.class);
when(rc.getRequest()).thenReturn(req);
when(view.requestContext()).thenReturn(rc);
HsJobBlock jobBlock = new HsJobBlock(config, jobHistory, view) {
// override this so that the job block can fetch a job id. // override this so that the job block can fetch a job id.
@Override @Override
public Map<String, String> moreParams() { public Map<String, String> moreParams() {